/* Interprocedural constant propagation
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

   Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Interprocedural constant propagation (IPA-CP).

   The goal of this transformation is to

   1) discover functions which are always invoked with some arguments with the
      same known constant values and modify the functions so that the
      subsequent optimizations can take advantage of the knowledge, and

   2) partial specialization - create specialized versions of functions
      transformed in this way if some parameters are known constants only in
      certain contexts but the estimated tradeoff between the speedup and the
      cost in code size is deemed good.
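   As a small illustrative sketch (the function below is made up, not taken
   from any real code base): if a function

     static int scale (int x, int factor) { return x * factor; }

   is called from every call site as scale (n, 8), goal 1) lets its body be
   optimized as if factor were the constant 8; if only some call sites pass 8
   and others pass a variable, goal 2) may create a clone such as
   scale.constprop with factor replaced by 8 and redirect only those call
   sites to it.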
   The algorithm also propagates types and attempts to perform type based
   devirtualization.  Types are propagated much like constants.
   The algorithm basically consists of three stages.  In the first, functions
   are analyzed one at a time and jump functions are constructed for all known
   call-sites.  In the second phase, the pass propagates information from the
   jump functions across the calls to reveal what values are available at what
   call sites, performs estimations of effects of known values on functions and
   their callees, and finally decides what specialized extra versions should be
   created.  In the third, the special versions materialize and appropriate
   calls are redirected.

   The algorithm used is to a certain extent based on "Interprocedural Constant
   Propagation", by David Callahan, Keith D Cooper, Ken Kennedy, Linda Torczon,
   Comp86, pg 152-161 and "A Methodology for Procedure Cloning" by Keith D
   Cooper, Mary W. Hall, and Ken Kennedy.
   First stage - intraprocedural analysis
   =======================================

   This phase computes jump_function and modification flags.

   A jump function for a call-site represents the values passed as actual
   arguments of a given call-site.  In principle, there are three types of
   values:

   Pass through - the caller's formal parameter is passed as an actual
                  argument, plus an operation on it can be performed.
   Constant - a constant is passed as an actual argument.
   Unknown - neither of the above.
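   As a small illustrative sketch (the functions below are made up and not
   part of GCC itself), consider

     static int sum (int a, int b) { return a + b; }

     int caller (int x) { return sum (x + 1, 16); }

   Here the call to sum gets a pass-through jump function for its first
   argument (caller's formal parameter x with an addition of 1 applied to it)
   and a constant jump function holding 16 for its second argument.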
   All jump function types are described in detail in ipa-prop.h, together with
   the data structures that represent them and methods of accessing them.

   ipcp_generate_summary() is the main function of the first stage.
   Second stage - interprocedural analysis
   ========================================

   This stage is itself divided into two phases.  In the first, we propagate
   known values over the call graph, in the second, we make cloning decisions.
   It uses a different algorithm than the one described in Callahan's original
   paper.

   First, we traverse the functions topologically from callers to callees and,
   for each strongly connected component (SCC), we propagate constants
   according to previously computed jump functions.  We also record what known
   values depend on other known values and estimate local effects.  Finally, we
   propagate cumulative information about these effects from dependent values
   to those on which they depend.

   Second, we again traverse the call graph in the same topological order and
   make clones for functions which we know are called with the same values in
   all contexts and decide about extra specialized clones of functions just for
   some contexts - these decisions are based on both local estimates and
   cumulative estimates propagated from callees.

   ipcp_propagate_stage() and ipcp_decision_stage() together constitute the
   second stage.

   Third phase - materialization of clones, call statement updates.
   ============================================

   This stage is currently performed by call graph code (mainly in cgraphunit.c
   and tree-inline.c) according to instructions inserted to the call graph by
   the second stage.  */
#include "coretypes.h"
#include "ipa-prop.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
/* Describes a particular source for an IPA-CP value.  */

struct ipcp_value_source
{
  /* The incoming edge that brought the value.  */
  struct cgraph_edge *cs;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the ipcp_value of the caller from which the described
     value has been derived.  Otherwise it is NULL.  */
  struct ipcp_value *val;
  /* Next pointer in a linked list of sources of a value.  */
  struct ipcp_value_source *next;
  /* If the jump function that resulted in this value was a pass-through or an
     ancestor, this is the index of the parameter of the caller the jump
     function references.  */
  int index;
};
/* Describes one particular value stored in struct ipcp_lattice.  */

struct ipcp_value
{
  /* The actual value for the given parameter.  This is either an IPA invariant
     or a TREE_BINFO describing a type that can be used for
     devirtualization.  */
  tree value;
  /* The list of sources from which this value originates.  */
  struct ipcp_value_source *sources;
  /* Next pointers in a linked list of all values in a lattice.  */
  struct ipcp_value *next;
  /* Next pointers in a linked list of values in a strongly connected component
     (SCC).  */
  struct ipcp_value *scc_next;
  /* Next pointers in a linked list of SCCs of values sorted topologically
     according to their sources.  */
  struct ipcp_value *topo_next;
  /* A specialized node created for this value, NULL if none has been (so far)
     created.  */
  struct cgraph_node *spec_node;
  /* Depth first search number and low link for topological sorting of
     values.  */
  int dfs, low_link;
  /* Time benefit and size cost that specializing the function for this value
     would bring about in this function alone.  */
  int local_time_benefit, local_size_cost;
  /* Time benefit and size cost that specializing the function for this value
     can bring about in its callees (transitively).  */
  int prop_time_benefit, prop_size_cost;
  /* True if this value is currently on the topo-sort stack.  */
  bool on_stack;
};
/* Allocation pools for values and their sources in ipa-cp.  */

alloc_pool ipcp_values_pool;
alloc_pool ipcp_sources_pool;
/* Lattice describing potential values of a formal parameter of a function and
   some of their other properties.  TOP is represented by a lattice with zero
   values and with contains_variable and bottom flags cleared.  BOTTOM is
   represented by a lattice with the bottom flag set.  In that case, values and
   contains_variable flag should be disregarded.  */
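/* As an illustrative example of the lattice states (a hypothetical scenario,
   not a statement about any particular program): if one call site passes the
   constant 4 for a parameter and another passes the constant 7, the lattice
   for that parameter holds the two values 4 and 7; if a further call site
   passes something unknown, contains_variable is set in addition, and when
   the parameter cannot be used for propagation at all the lattice is dropped
   to BOTTOM.  */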
struct ipcp_lattice
{
  /* The list of known values and types in this lattice.  Note that values are
     not deallocated if a lattice is set to bottom because there may be value
     sources referencing them.  */
  struct ipcp_value *values;
  /* Number of known values and types in this lattice.  */
  int values_count;
  /* The lattice contains a variable component (in addition to values).  */
  bool contains_variable;
  /* The value of the lattice is bottom (i.e. variable and unusable for any
     propagation).  */
  bool bottom;
  /* There is a virtual call based on this parameter.  */
  bool virt_call;
};
/* Maximal count found in program.  */

static gcov_type max_count;

/* Original overall size of the program.  */

static long overall_size, max_new_size;

/* Head of the linked list of topologically sorted values.  */

static struct ipcp_value *values_topo;
/* Return the lattice corresponding to the Ith formal parameter of the function
   described by INFO.  */

static inline struct ipcp_lattice *
ipa_get_lattice (struct ipa_node_params *info, int i)
{
  gcc_assert (i >= 0 && i < ipa_get_param_count (info));
  gcc_checking_assert (!info->ipcp_orig_node);
  gcc_checking_assert (info->lattices);
  return &(info->lattices[i]);
}
/* Return whether LAT is a lattice with a single constant and without an
   undetermined or variable component.  */

static inline bool
ipa_lat_is_single_const (struct ipcp_lattice *lat)
{
  if (lat->bottom
      || lat->contains_variable
      || lat->values_count != 1)
    return false;
  else
    return true;
}
241 /* Return true iff the CS is an edge within a strongly connected component as
242 computed by ipa_reduced_postorder. */
245 edge_within_scc (struct cgraph_edge
*cs
)
247 struct ipa_dfs_info
*caller_dfs
= (struct ipa_dfs_info
*) cs
->caller
->symbol
.aux
;
248 struct ipa_dfs_info
*callee_dfs
;
249 struct cgraph_node
*callee
= cgraph_function_node (cs
->callee
, NULL
);
251 callee_dfs
= (struct ipa_dfs_info
*) callee
->symbol
.aux
;
254 && caller_dfs
->scc_no
== callee_dfs
->scc_no
);
257 /* Print V which is extracted from a value in a lattice to F. */
260 print_ipcp_constant_value (FILE * f
, tree v
)
262 if (TREE_CODE (v
) == TREE_BINFO
)
264 fprintf (f
, "BINFO ");
265 print_generic_expr (f
, BINFO_TYPE (v
), 0);
267 else if (TREE_CODE (v
) == ADDR_EXPR
268 && TREE_CODE (TREE_OPERAND (v
, 0)) == CONST_DECL
)
271 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (v
, 0)), 0);
274 print_generic_expr (f
, v
, 0);
277 /* Print all ipcp_lattices of all functions to F. */
280 print_all_lattices (FILE * f
, bool dump_sources
, bool dump_benefits
)
282 struct cgraph_node
*node
;
285 fprintf (f
, "\nLattices:\n");
286 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
288 struct ipa_node_params
*info
;
290 info
= IPA_NODE_REF (node
);
291 fprintf (f
, " Node: %s/%i:\n", cgraph_node_name (node
), node
->uid
);
292 count
= ipa_get_param_count (info
);
293 for (i
= 0; i
< count
; i
++)
295 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
296 struct ipcp_value
*val
;
299 fprintf (f
, " param [%d]: ", i
);
302 fprintf (f
, "BOTTOM\n");
306 if (!lat
->values_count
&& !lat
->contains_variable
)
308 fprintf (f
, "TOP\n");
312 if (lat
->contains_variable
)
314 fprintf (f
, "VARIABLE");
320 for (val
= lat
->values
; val
; val
= val
->next
)
322 if (dump_benefits
&& prev
)
324 else if (!dump_benefits
&& prev
)
329 print_ipcp_constant_value (f
, val
->value
);
333 struct ipcp_value_source
*s
;
335 fprintf (f
, " [from:");
336 for (s
= val
->sources
; s
; s
= s
->next
)
337 fprintf (f
, " %i(%i)", s
->cs
->caller
->uid
,s
->cs
->frequency
);
342 fprintf (f
, " [loc_time: %i, loc_size: %i, "
343 "prop_time: %i, prop_size: %i]\n",
344 val
->local_time_benefit
, val
->local_size_cost
,
345 val
->prop_time_benefit
, val
->prop_size_cost
);
/* Determine whether it is at all technically possible to create clones of NODE
   and store this information in the ipa_node_params structure associated
   with NODE.  */
358 determine_versionability (struct cgraph_node
*node
)
360 const char *reason
= NULL
;
  /* There are a number of generic reasons functions cannot be versioned.  We
     also cannot remove parameters if there are type attributes such as fnspec
     present.  */
365 if (node
->alias
|| node
->thunk
.thunk_p
)
366 reason
= "alias or thunk";
367 else if (!node
->local
.versionable
)
368 reason
= "not a tree_versionable_function";
369 else if (cgraph_function_body_availability (node
) <= AVAIL_OVERWRITABLE
)
370 reason
= "insufficient body availability";
372 if (reason
&& dump_file
&& !node
->alias
&& !node
->thunk
.thunk_p
)
373 fprintf (dump_file
, "Function %s/%i is not versionable, reason: %s.\n",
374 cgraph_node_name (node
), node
->uid
, reason
);
376 node
->local
.versionable
= (reason
== NULL
);
/* Return true if it is at all technically possible to create clones of
   NODE.  */
383 ipcp_versionable_function_p (struct cgraph_node
*node
)
385 return node
->local
.versionable
;
/* Structure holding accumulated information about callers of a node.  */

struct caller_statistics
{
  gcov_type count_sum;
  int n_calls, n_hot_calls, freq_sum;
};

/* Initialize fields of STATS to zeroes.  */

static void
init_caller_stats (struct caller_statistics *stats)
{
  stats->count_sum = 0;
  stats->n_calls = 0;
  stats->n_hot_calls = 0;
  stats->freq_sum = 0;
}
407 /* Worker callback of cgraph_for_node_and_aliases accumulating statistics of
408 non-thunk incoming edges to NODE. */
411 gather_caller_stats (struct cgraph_node
*node
, void *data
)
413 struct caller_statistics
*stats
= (struct caller_statistics
*) data
;
414 struct cgraph_edge
*cs
;
416 for (cs
= node
->callers
; cs
; cs
= cs
->next_caller
)
417 if (cs
->caller
->thunk
.thunk_p
)
418 cgraph_for_node_and_aliases (cs
->caller
, gather_caller_stats
,
422 stats
->count_sum
+= cs
->count
;
423 stats
->freq_sum
+= cs
->frequency
;
425 if (cgraph_maybe_hot_edge_p (cs
))
426 stats
->n_hot_calls
++;
/* Return true if this NODE is a viable candidate for cloning.  */
435 ipcp_cloning_candidate_p (struct cgraph_node
*node
)
437 struct caller_statistics stats
;
439 gcc_checking_assert (cgraph_function_with_gimple_body_p (node
));
441 if (!flag_ipa_cp_clone
)
444 fprintf (dump_file
, "Not considering %s for cloning; "
445 "-fipa-cp-clone disabled.\n",
446 cgraph_node_name (node
));
450 if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node
->symbol
.decl
)))
453 fprintf (dump_file
, "Not considering %s for cloning; "
454 "optimizing it for size.\n",
455 cgraph_node_name (node
));
459 init_caller_stats (&stats
);
460 cgraph_for_node_and_aliases (node
, gather_caller_stats
, &stats
, false);
462 if (inline_summary (node
)->self_size
< stats
.n_calls
)
465 fprintf (dump_file
, "Considering %s for cloning; code might shrink.\n",
466 cgraph_node_name (node
));
  /* When profile is available and the function is hot, propagate into it even
     if calls seem cold; constant propagation can improve the function's speed
     significantly.  */
475 if (stats
.count_sum
> node
->count
* 90 / 100)
478 fprintf (dump_file
, "Considering %s for cloning; "
479 "usually called directly.\n",
480 cgraph_node_name (node
));
484 if (!stats
.n_hot_calls
)
487 fprintf (dump_file
, "Not considering %s for cloning; no hot calls.\n",
488 cgraph_node_name (node
));
492 fprintf (dump_file
, "Considering %s for cloning.\n",
493 cgraph_node_name (node
));
/* Arrays representing a topological ordering of call graph nodes and a stack
   of nodes used during constant propagation.  */
502 struct cgraph_node
**order
;
503 struct cgraph_node
**stack
;
504 int nnodes
, stack_top
;
507 /* Allocate the arrays in TOPO and topologically sort the nodes into order. */
510 build_toporder_info (struct topo_info
*topo
)
512 topo
->order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
513 topo
->stack
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
515 topo
->nnodes
= ipa_reduced_postorder (topo
->order
, true, true, NULL
);
/* Free information about strongly connected components and the arrays in
   TOPO.  */
522 free_toporder_info (struct topo_info
*topo
)
524 ipa_free_postorder_info ();
529 /* Add NODE to the stack in TOPO, unless it is already there. */
532 push_node_to_stack (struct topo_info
*topo
, struct cgraph_node
*node
)
534 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
535 if (info
->node_enqueued
)
537 info
->node_enqueued
= 1;
538 topo
->stack
[topo
->stack_top
++] = node
;
/* Pop a node from the stack in TOPO and return it or return NULL if the stack
   is empty.  */
544 static struct cgraph_node
*
545 pop_node_from_stack (struct topo_info
*topo
)
549 struct cgraph_node
*node
;
551 node
= topo
->stack
[topo
->stack_top
];
552 IPA_NODE_REF (node
)->node_enqueued
= 0;
/* Set lattice LAT to bottom and return true if it previously was not set as
   such.  */
563 set_lattice_to_bottom (struct ipcp_lattice
*lat
)
565 bool ret
= !lat
->bottom
;
570 /* Mark lattice as containing an unknown value and return true if it previously
571 was not marked as such. */
574 set_lattice_contains_variable (struct ipcp_lattice
*lat
)
576 bool ret
= !lat
->contains_variable
;
577 lat
->contains_variable
= true;
581 /* Initialize ipcp_lattices. */
584 initialize_node_lattices (struct cgraph_node
*node
)
586 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
587 struct cgraph_edge
*ie
;
588 bool disable
= false, variable
= false;
591 gcc_checking_assert (cgraph_function_with_gimple_body_p (node
));
592 if (!node
->local
.local
)
      /* When cloning is allowed, we can assume that externally visible
	 functions are not called.  We will compensate for this by cloning
	 later if necessary.  */
597 if (ipcp_versionable_function_p (node
)
598 && ipcp_cloning_candidate_p (node
))
604 if (disable
|| variable
)
606 for (i
= 0; i
< ipa_get_param_count (info
) ; i
++)
608 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
610 set_lattice_to_bottom (lat
);
612 set_lattice_contains_variable (lat
);
614 if (dump_file
&& (dump_flags
& TDF_DETAILS
)
615 && node
->alias
&& node
->thunk
.thunk_p
)
616 fprintf (dump_file
, "Marking all lattices of %s/%i as %s\n",
617 cgraph_node_name (node
), node
->uid
,
618 disable
? "BOTTOM" : "VARIABLE");
621 for (ie
= node
->indirect_calls
; ie
; ie
= ie
->next_callee
)
622 if (ie
->indirect_info
->polymorphic
)
624 gcc_checking_assert (ie
->indirect_info
->param_index
>= 0);
625 ipa_get_lattice (info
, ie
->indirect_info
->param_index
)->virt_call
= 1;
/* Return the result of a (possibly arithmetic) pass through jump function
   JFUNC on the constant value INPUT.  Return NULL_TREE if that cannot be
   determined or cannot be considered an interprocedural invariant.  */
634 ipa_get_jf_pass_through_result (struct ipa_jump_func
*jfunc
, tree input
)
638 if (ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
640 else if (TREE_CODE (input
) == TREE_BINFO
)
643 gcc_checking_assert (is_gimple_ip_invariant (input
));
644 if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc
))
646 restype
= boolean_type_node
;
648 restype
= TREE_TYPE (input
);
649 res
= fold_binary (ipa_get_jf_pass_through_operation (jfunc
), restype
,
650 input
, ipa_get_jf_pass_through_operand (jfunc
));
652 if (res
&& !is_gimple_ip_invariant (res
))
658 /* Return the result of an ancestor jump function JFUNC on the constant value
659 INPUT. Return NULL_TREE if that cannot be determined. */
662 ipa_get_jf_ancestor_result (struct ipa_jump_func
*jfunc
, tree input
)
664 if (TREE_CODE (input
) == TREE_BINFO
)
665 return get_binfo_at_offset (input
,
666 ipa_get_jf_ancestor_offset (jfunc
),
667 ipa_get_jf_ancestor_type (jfunc
));
668 else if (TREE_CODE (input
) == ADDR_EXPR
)
670 tree t
= TREE_OPERAND (input
, 0);
671 t
= build_ref_for_offset (EXPR_LOCATION (t
), t
,
672 ipa_get_jf_ancestor_offset (jfunc
),
673 ipa_get_jf_ancestor_type (jfunc
), NULL
, false);
674 return build_fold_addr_expr (t
);
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */
684 ipa_value_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
686 tree base_binfo
= TYPE_BINFO (ipa_get_jf_known_type_base_type (jfunc
));
689 return get_binfo_at_offset (base_binfo
,
690 ipa_get_jf_known_type_offset (jfunc
),
691 ipa_get_jf_known_type_component_type (jfunc
));
/* Determine whether JFUNC evaluates to a known value (that is either a
   constant or a binfo) and if so, return it.  Otherwise return NULL.  INFO
   describes the caller node so that pass-through jump functions can be
   evaluated.  */
700 ipa_value_from_jfunc (struct ipa_node_params
*info
, struct ipa_jump_func
*jfunc
)
702 if (jfunc
->type
== IPA_JF_CONST
)
703 return ipa_get_jf_constant (jfunc
);
704 else if (jfunc
->type
== IPA_JF_KNOWN_TYPE
)
705 return ipa_value_from_known_type_jfunc (jfunc
);
706 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
707 || jfunc
->type
== IPA_JF_ANCESTOR
)
712 if (jfunc
->type
== IPA_JF_PASS_THROUGH
)
713 idx
= ipa_get_jf_pass_through_formal_id (jfunc
);
715 idx
= ipa_get_jf_ancestor_formal_id (jfunc
);
717 if (info
->ipcp_orig_node
)
718 input
= VEC_index (tree
, info
->known_vals
, idx
);
721 struct ipcp_lattice
*lat
;
725 gcc_checking_assert (!flag_ipa_cp
);
728 lat
= ipa_get_lattice (info
, idx
);
729 if (!ipa_lat_is_single_const (lat
))
731 input
= lat
->values
->value
;
737 if (jfunc
->type
== IPA_JF_PASS_THROUGH
)
738 return ipa_get_jf_pass_through_result (jfunc
, input
);
740 return ipa_get_jf_ancestor_result (jfunc
, input
);
/* If checking is enabled, verify that no lattice is in the TOP state, i.e. not
   bottom, not containing a variable component and without any known value at
   the same time.  */
752 ipcp_verify_propagated_values (void)
754 struct cgraph_node
*node
;
756 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node
)
758 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
759 int i
, count
= ipa_get_param_count (info
);
761 for (i
= 0; i
< count
; i
++)
763 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
766 && !lat
->contains_variable
767 && lat
->values_count
== 0)
771 fprintf (dump_file
, "\nIPA lattices after constant "
773 print_all_lattices (dump_file
, true, false);
782 /* Return true iff X and Y should be considered equal values by IPA-CP. */
785 values_equal_for_ipcp_p (tree x
, tree y
)
787 gcc_checking_assert (x
!= NULL_TREE
&& y
!= NULL_TREE
);
792 if (TREE_CODE (x
) == TREE_BINFO
|| TREE_CODE (y
) == TREE_BINFO
)
795 if (TREE_CODE (x
) == ADDR_EXPR
796 && TREE_CODE (y
) == ADDR_EXPR
797 && TREE_CODE (TREE_OPERAND (x
, 0)) == CONST_DECL
798 && TREE_CODE (TREE_OPERAND (y
, 0)) == CONST_DECL
)
799 return operand_equal_p (DECL_INITIAL (TREE_OPERAND (x
, 0)),
800 DECL_INITIAL (TREE_OPERAND (y
, 0)), 0);
802 return operand_equal_p (x
, y
, 0);
805 /* Add a new value source to VAL, marking that a value comes from edge CS and
806 (if the underlying jump function is a pass-through or an ancestor one) from
807 a caller value SRC_VAL of a caller parameter described by SRC_INDEX. */
810 add_value_source (struct ipcp_value
*val
, struct cgraph_edge
*cs
,
811 struct ipcp_value
*src_val
, int src_idx
)
813 struct ipcp_value_source
*src
;
815 src
= (struct ipcp_value_source
*) pool_alloc (ipcp_sources_pool
);
818 src
->index
= src_idx
;
820 src
->next
= val
->sources
;
/* Try to add NEWVAL to LAT, potentially creating a new struct ipcp_value for
   it.  CS, SRC_VAL and SRC_INDEX are meant for add_value_source and have the
   same meaning.  */
830 add_value_to_lattice (struct ipcp_lattice
*lat
, tree newval
,
831 struct cgraph_edge
*cs
, struct ipcp_value
*src_val
,
834 struct ipcp_value
*val
;
840 for (val
= lat
->values
; val
; val
= val
->next
)
841 if (values_equal_for_ipcp_p (val
->value
, newval
))
843 if (edge_within_scc (cs
))
845 struct ipcp_value_source
*s
;
846 for (s
= val
->sources
; s
; s
= s
->next
)
853 add_value_source (val
, cs
, src_val
, src_idx
);
857 if (lat
->values_count
== PARAM_VALUE (PARAM_IPA_CP_VALUE_LIST_SIZE
))
      /* We can only free sources, not the values themselves, because sources
	 of other values in this SCC might point to them.  */
861 for (val
= lat
->values
; val
; val
= val
->next
)
865 struct ipcp_value_source
*src
= val
->sources
;
866 val
->sources
= src
->next
;
867 pool_free (ipcp_sources_pool
, src
);
872 return set_lattice_to_bottom (lat
);
876 val
= (struct ipcp_value
*) pool_alloc (ipcp_values_pool
);
877 memset (val
, 0, sizeof (*val
));
879 add_value_source (val
, cs
, src_val
, src_idx
);
881 val
->next
= lat
->values
;
886 /* Propagate values through a pass-through jump function JFUNC associated with
887 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
888 is the index of the source parameter. */
891 propagate_vals_accross_pass_through (struct cgraph_edge
*cs
,
892 struct ipa_jump_func
*jfunc
,
893 struct ipcp_lattice
*src_lat
,
894 struct ipcp_lattice
*dest_lat
,
897 struct ipcp_value
*src_val
;
900 if (ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
901 for (src_val
= src_lat
->values
; src_val
; src_val
= src_val
->next
)
902 ret
|= add_value_to_lattice (dest_lat
, src_val
->value
, cs
,
  /* Do not create new values when propagating within an SCC because if there
     are arithmetic functions with circular dependencies, there is an infinite
     number of them and we would just make lattices bottom.  */
907 else if (edge_within_scc (cs
))
908 ret
= set_lattice_contains_variable (dest_lat
);
910 for (src_val
= src_lat
->values
; src_val
; src_val
= src_val
->next
)
912 tree cstval
= src_val
->value
;
914 if (TREE_CODE (cstval
) == TREE_BINFO
)
916 ret
|= set_lattice_contains_variable (dest_lat
);
919 cstval
= ipa_get_jf_pass_through_result (jfunc
, cstval
);
922 ret
|= add_value_to_lattice (dest_lat
, cstval
, cs
, src_val
, src_idx
);
924 ret
|= set_lattice_contains_variable (dest_lat
);
930 /* Propagate values through an ancestor jump function JFUNC associated with
931 edge CS, taking values from SRC_LAT and putting them into DEST_LAT. SRC_IDX
932 is the index of the source parameter. */
935 propagate_vals_accross_ancestor (struct cgraph_edge
*cs
,
936 struct ipa_jump_func
*jfunc
,
937 struct ipcp_lattice
*src_lat
,
938 struct ipcp_lattice
*dest_lat
,
941 struct ipcp_value
*src_val
;
944 if (edge_within_scc (cs
))
945 return set_lattice_contains_variable (dest_lat
);
947 for (src_val
= src_lat
->values
; src_val
; src_val
= src_val
->next
)
949 tree t
= ipa_get_jf_ancestor_result (jfunc
, src_val
->value
);
952 ret
|= add_value_to_lattice (dest_lat
, t
, cs
, src_val
, src_idx
);
954 ret
|= set_lattice_contains_variable (dest_lat
);
960 /* Propagate values across jump function JFUNC that is associated with edge CS
961 and put the values into DEST_LAT. */
964 propagate_accross_jump_function (struct cgraph_edge
*cs
,
965 struct ipa_jump_func
*jfunc
,
966 struct ipcp_lattice
*dest_lat
)
968 if (dest_lat
->bottom
)
971 if (jfunc
->type
== IPA_JF_CONST
972 || jfunc
->type
== IPA_JF_KNOWN_TYPE
)
976 if (jfunc
->type
== IPA_JF_KNOWN_TYPE
)
978 val
= ipa_value_from_known_type_jfunc (jfunc
);
980 return set_lattice_contains_variable (dest_lat
);
983 val
= ipa_get_jf_constant (jfunc
);
984 return add_value_to_lattice (dest_lat
, val
, cs
, NULL
, 0);
986 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
987 || jfunc
->type
== IPA_JF_ANCESTOR
)
989 struct ipa_node_params
*caller_info
= IPA_NODE_REF (cs
->caller
);
990 struct ipcp_lattice
*src_lat
;
994 if (jfunc
->type
== IPA_JF_PASS_THROUGH
)
995 src_idx
= ipa_get_jf_pass_through_formal_id (jfunc
);
997 src_idx
= ipa_get_jf_ancestor_formal_id (jfunc
);
999 src_lat
= ipa_get_lattice (caller_info
, src_idx
);
1000 if (src_lat
->bottom
)
1001 return set_lattice_contains_variable (dest_lat
);
1003 /* If we would need to clone the caller and cannot, do not propagate. */
1004 if (!ipcp_versionable_function_p (cs
->caller
)
1005 && (src_lat
->contains_variable
1006 || (src_lat
->values_count
> 1)))
1007 return set_lattice_contains_variable (dest_lat
);
1009 if (jfunc
->type
== IPA_JF_PASS_THROUGH
)
1010 ret
= propagate_vals_accross_pass_through (cs
, jfunc
, src_lat
,
1013 ret
= propagate_vals_accross_ancestor (cs
, jfunc
, src_lat
, dest_lat
,
1016 if (src_lat
->contains_variable
)
1017 ret
|= set_lattice_contains_variable (dest_lat
);
  /* TODO: We currently do not handle member method pointers in IPA-CP (we only
     use them for indirect inlining); we should propagate them too.  */
1024 return set_lattice_contains_variable (dest_lat
);
/* Propagate constants from the caller to the callee of CS.  INFO describes the
   caller.  */
1031 propagate_constants_accross_call (struct cgraph_edge
*cs
)
1033 struct ipa_node_params
*callee_info
;
1034 enum availability availability
;
1035 struct cgraph_node
*callee
, *alias_or_thunk
;
1036 struct ipa_edge_args
*args
;
1038 int i
, args_count
, parms_count
;
1040 callee
= cgraph_function_node (cs
->callee
, &availability
);
1041 if (!callee
->analyzed
)
1043 gcc_checking_assert (cgraph_function_with_gimple_body_p (callee
));
1044 callee_info
= IPA_NODE_REF (callee
);
1046 args
= IPA_EDGE_REF (cs
);
1047 args_count
= ipa_get_cs_argument_count (args
);
1048 parms_count
= ipa_get_param_count (callee_info
);
1050 /* If this call goes through a thunk we must not propagate to the first (0th)
1051 parameter. However, we might need to uncover a thunk from below a series
1052 of aliases first. */
1053 alias_or_thunk
= cs
->callee
;
1054 while (alias_or_thunk
->alias
)
1055 alias_or_thunk
= cgraph_alias_aliased_node (alias_or_thunk
);
1056 if (alias_or_thunk
->thunk
.thunk_p
)
1058 ret
|= set_lattice_contains_variable (ipa_get_lattice (callee_info
, 0));
1064 for (; (i
< args_count
) && (i
< parms_count
); i
++)
1066 struct ipa_jump_func
*jump_func
= ipa_get_ith_jump_func (args
, i
);
1067 struct ipcp_lattice
*dest_lat
= ipa_get_lattice (callee_info
, i
);
1069 if (availability
== AVAIL_OVERWRITABLE
)
1070 ret
|= set_lattice_contains_variable (dest_lat
);
1072 ret
|= propagate_accross_jump_function (cs
, jump_func
, dest_lat
);
1074 for (; i
< parms_count
; i
++)
1075 ret
|= set_lattice_contains_variable (ipa_get_lattice (callee_info
, i
));
/* If an indirect edge IE can be turned into a direct one based on KNOWN_VALS
   (which can contain both constants and binfos) or KNOWN_BINFOS (which can be
   NULL), return the destination.  */
1085 ipa_get_indirect_edge_target (struct cgraph_edge
*ie
,
1086 VEC (tree
, heap
) *known_vals
,
1087 VEC (tree
, heap
) *known_binfos
,
1088 VEC (ipa_agg_jump_function_p
, heap
) *known_aggs
)
1090 int param_index
= ie
->indirect_info
->param_index
;
1091 HOST_WIDE_INT token
, anc_offset
;
1095 if (param_index
== -1)
1098 if (!ie
->indirect_info
->polymorphic
)
1102 if (ie
->indirect_info
->agg_contents
)
1104 if (VEC_length (ipa_agg_jump_function_p
, known_aggs
)
1105 > (unsigned int) param_index
)
1107 struct ipa_agg_jump_function
*agg
;
1108 agg
= VEC_index (ipa_agg_jump_function_p
, known_aggs
,
1110 t
= ipa_find_agg_cst_for_param (agg
, ie
->indirect_info
->offset
,
1111 ie
->indirect_info
->by_ref
);
1117 t
= (VEC_length (tree
, known_vals
) > (unsigned int) param_index
1118 ? VEC_index (tree
, known_vals
, param_index
) : NULL
);
1121 TREE_CODE (t
) == ADDR_EXPR
1122 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
)
1123 return TREE_OPERAND (t
, 0);
1128 gcc_assert (!ie
->indirect_info
->agg_contents
);
1129 token
= ie
->indirect_info
->otr_token
;
1130 anc_offset
= ie
->indirect_info
->offset
;
1131 otr_type
= ie
->indirect_info
->otr_type
;
1133 t
= VEC_index (tree
, known_vals
, param_index
);
1134 if (!t
&& known_binfos
1135 && VEC_length (tree
, known_binfos
) > (unsigned int) param_index
)
1136 t
= VEC_index (tree
, known_binfos
, param_index
);
1140 if (TREE_CODE (t
) != TREE_BINFO
)
1143 binfo
= gimple_extract_devirt_binfo_from_cst (t
);
1146 binfo
= get_binfo_at_offset (binfo
, anc_offset
, otr_type
);
1149 return gimple_get_virt_method_for_binfo (token
, binfo
);
1155 binfo
= get_binfo_at_offset (t
, anc_offset
, otr_type
);
1158 return gimple_get_virt_method_for_binfo (token
, binfo
);
1162 /* Calculate devirtualization time bonus for NODE, assuming we know KNOWN_CSTS
1163 and KNOWN_BINFOS. */
1166 devirtualization_time_bonus (struct cgraph_node
*node
,
1167 VEC (tree
, heap
) *known_csts
,
1168 VEC (tree
, heap
) *known_binfos
)
1170 struct cgraph_edge
*ie
;
1173 for (ie
= node
->indirect_calls
; ie
; ie
= ie
->next_callee
)
1175 struct cgraph_node
*callee
;
1176 struct inline_summary
*isummary
;
1179 target
= ipa_get_indirect_edge_target (ie
, known_csts
, known_binfos
,
1184 /* Only bare minimum benefit for clearly un-inlineable targets. */
1186 callee
= cgraph_get_node (target
);
1187 if (!callee
|| !callee
->analyzed
)
1189 isummary
= inline_summary (callee
);
1190 if (!isummary
->inlinable
)
      /* FIXME: The values below need re-considering and perhaps also
	 integrating into the cost metrics, at least in some very basic way.  */
1195 if (isummary
->size
<= MAX_INLINE_INSNS_AUTO
/ 4)
1197 else if (isummary
->size
<= MAX_INLINE_INSNS_AUTO
/ 2)
1199 else if (isummary
->size
<= MAX_INLINE_INSNS_AUTO
1200 || DECL_DECLARED_INLINE_P (callee
->symbol
.decl
))
/* Return true if cloning NODE is a good idea, given the estimated TIME_BENEFIT
   and SIZE_COST and with the sum of frequencies of incoming edges to the
   potential new clone in FREQ_SUM.  */
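/* An informal sketch of the evaluation done below (paraphrased from the code
   that follows, not an independent definition): without profile data the pass
   roughly computes

     evaluation = time_benefit * freq_sum / size_cost

   and with profile data freq_sum is replaced by a factor derived from
   count_sum relative to max_count; cloning is deemed worthwhile when the
   evaluation reaches PARAM_IPA_CP_EVAL_THRESHOLD.  */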
1212 good_cloning_opportunity_p (struct cgraph_node
*node
, int time_benefit
,
1213 int freq_sum
, gcov_type count_sum
, int size_cost
)
1215 if (time_benefit
== 0
1216 || !flag_ipa_cp_clone
1217 || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node
->symbol
.decl
)))
1220 gcc_assert (size_cost
> 0);
1224 int factor
= (count_sum
* 1000) / max_count
;
1225 HOST_WIDEST_INT evaluation
= (((HOST_WIDEST_INT
) time_benefit
* factor
)
1228 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1229 fprintf (dump_file
, " good_cloning_opportunity_p (time: %i, "
1230 "size: %i, count_sum: " HOST_WIDE_INT_PRINT_DEC
1231 ") -> evaluation: " HOST_WIDEST_INT_PRINT_DEC
1232 ", threshold: %i\n",
1233 time_benefit
, size_cost
, (HOST_WIDE_INT
) count_sum
,
1236 return evaluation
>= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD
);
1240 HOST_WIDEST_INT evaluation
= (((HOST_WIDEST_INT
) time_benefit
* freq_sum
)
1243 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1244 fprintf (dump_file
, " good_cloning_opportunity_p (time: %i, "
1245 "size: %i, freq_sum: %i) -> evaluation: "
1246 HOST_WIDEST_INT_PRINT_DEC
", threshold: %i\n",
1247 time_benefit
, size_cost
, freq_sum
, evaluation
,
1248 CGRAPH_FREQ_BASE
/2);
1250 return evaluation
>= PARAM_VALUE (PARAM_IPA_CP_EVAL_THRESHOLD
);
1255 /* Allocate KNOWN_CSTS and KNOWN_BINFOS and populate them with values of
1256 parameters that are known independent of the context. INFO describes the
1257 function. If REMOVABLE_PARAMS_COST is non-NULL, the movement cost of all
1258 removable parameters will be stored in it. */
1261 gather_context_independent_values (struct ipa_node_params
*info
,
1262 VEC (tree
, heap
) **known_csts
,
1263 VEC (tree
, heap
) **known_binfos
,
1264 int *removable_params_cost
)
1266 int i
, count
= ipa_get_param_count (info
);
1270 *known_binfos
= NULL
;
1271 VEC_safe_grow_cleared (tree
, heap
, *known_csts
, count
);
1272 VEC_safe_grow_cleared (tree
, heap
, *known_binfos
, count
);
1274 if (removable_params_cost
)
1275 *removable_params_cost
= 0;
1277 for (i
= 0; i
< count
; i
++)
1279 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
1281 if (ipa_lat_is_single_const (lat
))
1283 struct ipcp_value
*val
= lat
->values
;
1284 if (TREE_CODE (val
->value
) != TREE_BINFO
)
1286 VEC_replace (tree
, *known_csts
, i
, val
->value
);
1287 if (removable_params_cost
)
1288 *removable_params_cost
1289 += estimate_move_cost (TREE_TYPE (val
->value
));
1292 else if (lat
->virt_call
)
1294 VEC_replace (tree
, *known_binfos
, i
, val
->value
);
1297 else if (removable_params_cost
1298 && !ipa_is_param_used (info
, i
))
1299 *removable_params_cost
1300 += estimate_move_cost (TREE_TYPE (ipa_get_param (info
, i
)));
1302 else if (removable_params_cost
1303 && !ipa_is_param_used (info
, i
))
1304 *removable_params_cost
1305 += estimate_move_cost (TREE_TYPE (ipa_get_param (info
, i
)));
1311 /* Iterate over known values of parameters of NODE and estimate the local
1312 effects in terms of time and size they have. */
1315 estimate_local_effects (struct cgraph_node
*node
)
1317 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1318 int i
, count
= ipa_get_param_count (info
);
1319 VEC (tree
, heap
) *known_csts
, *known_binfos
;
1321 int base_time
= inline_summary (node
)->time
;
1322 int removable_params_cost
;
1324 if (!count
|| !ipcp_versionable_function_p (node
))
1327 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1328 fprintf (dump_file
, "\nEstimating effects for %s/%i, base_time: %i.\n",
1329 cgraph_node_name (node
), node
->uid
, base_time
);
1331 always_const
= gather_context_independent_values (info
, &known_csts
,
1333 &removable_params_cost
);
1336 struct caller_statistics stats
;
1339 init_caller_stats (&stats
);
1340 cgraph_for_node_and_aliases (node
, gather_caller_stats
, &stats
, false);
1341 estimate_ipcp_clone_size_and_time (node
, known_csts
, known_binfos
,
1343 time
-= devirtualization_time_bonus (node
, known_csts
, known_binfos
);
1344 time
-= removable_params_cost
;
1345 size
-= stats
.n_calls
* removable_params_cost
;
1348 fprintf (dump_file
, " - context independent values, size: %i, "
1349 "time_benefit: %i\n", size
, base_time
- time
);
1352 || cgraph_will_be_removed_from_program_if_no_direct_calls (node
))
1354 info
->clone_for_all_contexts
= true;
1358 fprintf (dump_file
, " Decided to specialize for all "
1359 "known contexts, code not going to grow.\n");
1361 else if (good_cloning_opportunity_p (node
, base_time
- time
,
1362 stats
.freq_sum
, stats
.count_sum
,
1365 if (size
+ overall_size
<= max_new_size
)
1367 info
->clone_for_all_contexts
= true;
1369 overall_size
+= size
;
1372 fprintf (dump_file
, " Decided to specialize for all "
1373 "known contexts, growth deemed beneficial.\n");
1375 else if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1376 fprintf (dump_file
, " Not cloning for all contexts because "
1377 "max_new_size would be reached with %li.\n",
1378 size
+ overall_size
);
1382 for (i
= 0; i
< count
; i
++)
1384 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
1385 struct ipcp_value
*val
;
1390 || VEC_index (tree
, known_csts
, i
)
1391 || VEC_index (tree
, known_binfos
, i
))
1394 for (val
= lat
->values
; val
; val
= val
->next
)
1396 int time
, size
, time_benefit
;
1398 if (TREE_CODE (val
->value
) != TREE_BINFO
)
1400 VEC_replace (tree
, known_csts
, i
, val
->value
);
1401 VEC_replace (tree
, known_binfos
, i
, NULL_TREE
);
1402 emc
= estimate_move_cost (TREE_TYPE (val
->value
));
1404 else if (lat
->virt_call
)
1406 VEC_replace (tree
, known_csts
, i
, NULL_TREE
);
1407 VEC_replace (tree
, known_binfos
, i
, val
->value
);
1413 estimate_ipcp_clone_size_and_time (node
, known_csts
, known_binfos
,
1415 time_benefit
= base_time
- time
1416 + devirtualization_time_bonus (node
, known_csts
, known_binfos
)
1417 + removable_params_cost
+ emc
;
1419 gcc_checking_assert (size
>=0);
1420 /* The inliner-heuristics based estimates may think that in certain
1421 contexts some functions do not have any size at all but we want
1422 all specializations to have at least a tiny cost, not least not to
1427 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1429 fprintf (dump_file
, " - estimates for value ");
1430 print_ipcp_constant_value (dump_file
, val
->value
);
1431 fprintf (dump_file
, " for parameter ");
1432 print_generic_expr (dump_file
, ipa_get_param (info
, i
), 0);
1433 fprintf (dump_file
, ": time_benefit: %i, size: %i\n",
1434 time_benefit
, size
);
1437 val
->local_time_benefit
= time_benefit
;
1438 val
->local_size_cost
= size
;
1442 VEC_free (tree
, heap
, known_csts
);
1443 VEC_free (tree
, heap
, known_binfos
);
1447 /* Add value CUR_VAL and all yet-unsorted values it is dependent on to the
1448 topological sort of values. */
1451 add_val_to_toposort (struct ipcp_value
*cur_val
)
1453 static int dfs_counter
= 0;
1454 static struct ipcp_value
*stack
;
1455 struct ipcp_value_source
*src
;
1461 cur_val
->dfs
= dfs_counter
;
1462 cur_val
->low_link
= dfs_counter
;
1464 cur_val
->topo_next
= stack
;
1466 cur_val
->on_stack
= true;
1468 for (src
= cur_val
->sources
; src
; src
= src
->next
)
1471 if (src
->val
->dfs
== 0)
1473 add_val_to_toposort (src
->val
);
1474 if (src
->val
->low_link
< cur_val
->low_link
)
1475 cur_val
->low_link
= src
->val
->low_link
;
1477 else if (src
->val
->on_stack
1478 && src
->val
->dfs
< cur_val
->low_link
)
1479 cur_val
->low_link
= src
->val
->dfs
;
1482 if (cur_val
->dfs
== cur_val
->low_link
)
1484 struct ipcp_value
*v
, *scc_list
= NULL
;
1489 stack
= v
->topo_next
;
1490 v
->on_stack
= false;
1492 v
->scc_next
= scc_list
;
1495 while (v
!= cur_val
);
1497 cur_val
->topo_next
= values_topo
;
1498 values_topo
= cur_val
;
1502 /* Add all values in lattices associated with NODE to the topological sort if
1503 they are not there yet. */
1506 add_all_node_vals_to_toposort (struct cgraph_node
*node
)
1508 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1509 int i
, count
= ipa_get_param_count (info
);
1511 for (i
= 0; i
< count
; i
++)
1513 struct ipcp_lattice
*lat
= ipa_get_lattice (info
, i
);
1514 struct ipcp_value
*val
;
1516 if (lat
->bottom
|| !lat
->values
)
1518 for (val
= lat
->values
; val
; val
= val
->next
)
1519 add_val_to_toposort (val
);
/* One pass of constant propagation along the call graph edges, from callers
   to callees (requires topological ordering in TOPO), iterating over strongly
   connected components.  */
1528 propagate_constants_topo (struct topo_info
*topo
)
1532 for (i
= topo
->nnodes
- 1; i
>= 0; i
--)
1534 struct cgraph_node
*v
, *node
= topo
->order
[i
];
1535 struct ipa_dfs_info
*node_dfs_info
;
1537 if (!cgraph_function_with_gimple_body_p (node
))
1540 node_dfs_info
= (struct ipa_dfs_info
*) node
->symbol
.aux
;
1541 /* First, iteratively propagate within the strongly connected component
1542 until all lattices stabilize. */
1543 v
= node_dfs_info
->next_cycle
;
1546 push_node_to_stack (topo
, v
);
1547 v
= ((struct ipa_dfs_info
*) v
->symbol
.aux
)->next_cycle
;
1553 struct cgraph_edge
*cs
;
1555 for (cs
= v
->callees
; cs
; cs
= cs
->next_callee
)
1556 if (edge_within_scc (cs
)
1557 && propagate_constants_accross_call (cs
))
1558 push_node_to_stack (topo
, cs
->callee
);
1559 v
= pop_node_from_stack (topo
);
      /* Afterwards, propagate along edges leading out of the SCC, calculate
	 the local effects of the discovered constants and add all valid values
	 to their topological sort.  */
1568 struct cgraph_edge
*cs
;
1570 estimate_local_effects (v
);
1571 add_all_node_vals_to_toposort (v
);
1572 for (cs
= v
->callees
; cs
; cs
= cs
->next_callee
)
1573 if (!edge_within_scc (cs
))
1574 propagate_constants_accross_call (cs
);
1576 v
= ((struct ipa_dfs_info
*) v
->symbol
.aux
)->next_cycle
;
/* Return the sum of A and B if neither of them is bigger than INT_MAX/2,
   otherwise return the bigger one.  */

static int
safe_add (int a, int b)
{
  if (a > INT_MAX/2 || b > INT_MAX/2)
    return a > b ? a : b;
  else
    return a + b;
}
/* Propagate the estimated effects of individual values along the topological
   order from the dependent values to those they depend on.  */
1599 propagate_effects (void)
1601 struct ipcp_value
*base
;
1603 for (base
= values_topo
; base
; base
= base
->topo_next
)
1605 struct ipcp_value_source
*src
;
1606 struct ipcp_value
*val
;
1607 int time
= 0, size
= 0;
1609 for (val
= base
; val
; val
= val
->scc_next
)
1611 time
= safe_add (time
,
1612 val
->local_time_benefit
+ val
->prop_time_benefit
);
1613 size
= safe_add (size
, val
->local_size_cost
+ val
->prop_size_cost
);
1616 for (val
= base
; val
; val
= val
->scc_next
)
1617 for (src
= val
->sources
; src
; src
= src
->next
)
1619 && cgraph_maybe_hot_edge_p (src
->cs
))
1621 src
->val
->prop_time_benefit
= safe_add (time
,
1622 src
->val
->prop_time_benefit
);
1623 src
->val
->prop_size_cost
= safe_add (size
,
1624 src
->val
->prop_size_cost
);
1630 /* Propagate constants, binfos and their effects from the summaries
1631 interprocedurally. */
1634 ipcp_propagate_stage (struct topo_info
*topo
)
1636 struct cgraph_node
*node
;
1639 fprintf (dump_file
, "\n Propagating constants:\n\n");
1642 ipa_update_after_lto_read ();
1645 FOR_EACH_DEFINED_FUNCTION (node
)
1647 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1649 determine_versionability (node
);
1650 if (cgraph_function_with_gimple_body_p (node
))
1652 info
->lattices
= XCNEWVEC (struct ipcp_lattice
,
1653 ipa_get_param_count (info
));
1654 initialize_node_lattices (node
);
1656 if (node
->count
> max_count
)
1657 max_count
= node
->count
;
1658 overall_size
+= inline_summary (node
)->self_size
;
1661 max_new_size
= overall_size
;
1662 if (max_new_size
< PARAM_VALUE (PARAM_LARGE_UNIT_INSNS
))
1663 max_new_size
= PARAM_VALUE (PARAM_LARGE_UNIT_INSNS
);
1664 max_new_size
+= max_new_size
* PARAM_VALUE (PARAM_IPCP_UNIT_GROWTH
) / 100 + 1;
1667 fprintf (dump_file
, "\noverall_size: %li, max_new_size: %li\n",
1668 overall_size
, max_new_size
);
1670 propagate_constants_topo (topo
);
1671 #ifdef ENABLE_CHECKING
1672 ipcp_verify_propagated_values ();
1674 propagate_effects ();
1678 fprintf (dump_file
, "\nIPA lattices after all propagation:\n");
1679 print_all_lattices (dump_file
, (dump_flags
& TDF_DETAILS
), true);
1683 /* Discover newly direct outgoing edges from NODE which is a new clone with
1684 known KNOWN_VALS and make them direct. */
1687 ipcp_discover_new_direct_edges (struct cgraph_node
*node
,
1688 VEC (tree
, heap
) *known_vals
)
1690 struct cgraph_edge
*ie
, *next_ie
;
1692 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
1696 next_ie
= ie
->next_callee
;
1697 target
= ipa_get_indirect_edge_target (ie
, known_vals
, NULL
, NULL
);
1699 ipa_make_edge_direct_to_target (ie
, target
);
/* Vector of pointers that form linked lists of clones of an original cgraph
   edge.  */

static VEC (cgraph_edge_p, heap) *next_edge_clone;
static void
grow_next_edge_clone_vector (void)
{
  if (VEC_length (cgraph_edge_p, next_edge_clone)
      <= (unsigned) cgraph_edge_max_uid)
    VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
			   cgraph_edge_max_uid + 1);
}
/* Edge duplication hook to grow the appropriate linked list in
   the next_edge_clone vector.  */
1721 ipcp_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
1722 __attribute__((unused
)) void *data
)
1724 grow_next_edge_clone_vector ();
1725 VEC_replace (cgraph_edge_p
, next_edge_clone
, dst
->uid
,
1726 VEC_index (cgraph_edge_p
, next_edge_clone
, src
->uid
));
1727 VEC_replace (cgraph_edge_p
, next_edge_clone
, src
->uid
, dst
);
/* Get the next clone in the linked list of clones of an edge.  */

static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
  return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
}
1738 /* Return true if edge CS does bring about the value described by SRC. */
1741 cgraph_edge_brings_value_p (struct cgraph_edge
*cs
,
1742 struct ipcp_value_source
*src
)
1744 struct ipa_node_params
*caller_info
= IPA_NODE_REF (cs
->caller
);
1746 if (IPA_NODE_REF (cs
->callee
)->ipcp_orig_node
1747 || caller_info
->node_dead
)
1752 if (caller_info
->ipcp_orig_node
)
1754 tree t
= VEC_index (tree
, caller_info
->known_vals
, src
->index
);
1755 return (t
!= NULL_TREE
1756 && values_equal_for_ipcp_p (src
->val
->value
, t
));
1760 struct ipcp_lattice
*lat
= ipa_get_lattice (caller_info
, src
->index
);
1761 if (ipa_lat_is_single_const (lat
)
1762 && values_equal_for_ipcp_p (src
->val
->value
, lat
->values
->value
))
/* Given VAL, iterate over all its sources and if they still hold, add their
   edge frequency and their number into *FREQ_SUM and *CALLER_COUNT
   respectively.  */
1774 get_info_about_necessary_edges (struct ipcp_value
*val
, int *freq_sum
,
1775 gcov_type
*count_sum
, int *caller_count
)
1777 struct ipcp_value_source
*src
;
1778 int freq
= 0, count
= 0;
1782 for (src
= val
->sources
; src
; src
= src
->next
)
1784 struct cgraph_edge
*cs
= src
->cs
;
1787 if (cgraph_edge_brings_value_p (cs
, src
))
1790 freq
+= cs
->frequency
;
1792 hot
|= cgraph_maybe_hot_edge_p (cs
);
1794 cs
= get_next_cgraph_edge_clone (cs
);
1800 *caller_count
= count
;
1804 /* Return a vector of incoming edges that do bring value VAL. It is assumed
1805 their number is known and equal to CALLER_COUNT. */
1807 static VEC (cgraph_edge_p
,heap
) *
1808 gather_edges_for_value (struct ipcp_value
*val
, int caller_count
)
1810 struct ipcp_value_source
*src
;
1811 VEC (cgraph_edge_p
,heap
) *ret
;
1813 ret
= VEC_alloc (cgraph_edge_p
, heap
, caller_count
);
1814 for (src
= val
->sources
; src
; src
= src
->next
)
1816 struct cgraph_edge
*cs
= src
->cs
;
1819 if (cgraph_edge_brings_value_p (cs
, src
))
1820 VEC_quick_push (cgraph_edge_p
, ret
, cs
);
1821 cs
= get_next_cgraph_edge_clone (cs
);
/* Construct a replacement map for a known VALUE for a formal parameter PARM.
   Return it or NULL if for some reason it cannot be created.  */
1831 static struct ipa_replace_map
*
1832 get_replacement_map (tree value
, tree parm
)
1834 tree req_type
= TREE_TYPE (parm
);
1835 struct ipa_replace_map
*replace_map
;
1837 if (!useless_type_conversion_p (req_type
, TREE_TYPE (value
)))
1839 if (fold_convertible_p (req_type
, value
))
1840 value
= fold_build1 (NOP_EXPR
, req_type
, value
);
1841 else if (TYPE_SIZE (req_type
) == TYPE_SIZE (TREE_TYPE (value
)))
1842 value
= fold_build1 (VIEW_CONVERT_EXPR
, req_type
, value
);
1847 fprintf (dump_file
, " const ");
1848 print_generic_expr (dump_file
, value
, 0);
1849 fprintf (dump_file
, " can't be converted to param ");
1850 print_generic_expr (dump_file
, parm
, 0);
1851 fprintf (dump_file
, "\n");
1857 replace_map
= ggc_alloc_ipa_replace_map ();
1860 fprintf (dump_file
, " replacing param ");
1861 print_generic_expr (dump_file
, parm
, 0);
1862 fprintf (dump_file
, " with const ");
1863 print_generic_expr (dump_file
, value
, 0);
1864 fprintf (dump_file
, "\n");
1866 replace_map
->old_tree
= parm
;
1867 replace_map
->new_tree
= value
;
1868 replace_map
->replace_p
= true;
1869 replace_map
->ref_p
= false;
1874 /* Dump new profiling counts */
1877 dump_profile_updates (struct cgraph_node
*orig_node
,
1878 struct cgraph_node
*new_node
)
1880 struct cgraph_edge
*cs
;
1882 fprintf (dump_file
, " setting count of the specialized node to "
1883 HOST_WIDE_INT_PRINT_DEC
"\n", (HOST_WIDE_INT
) new_node
->count
);
1884 for (cs
= new_node
->callees
; cs
; cs
= cs
->next_callee
)
1885 fprintf (dump_file
, " edge to %s has count "
1886 HOST_WIDE_INT_PRINT_DEC
"\n",
1887 cgraph_node_name (cs
->callee
), (HOST_WIDE_INT
) cs
->count
);
1889 fprintf (dump_file
, " setting count of the original node to "
1890 HOST_WIDE_INT_PRINT_DEC
"\n", (HOST_WIDE_INT
) orig_node
->count
);
1891 for (cs
= orig_node
->callees
; cs
; cs
= cs
->next_callee
)
1892 fprintf (dump_file
, " edge to %s is left with "
1893 HOST_WIDE_INT_PRINT_DEC
"\n",
1894 cgraph_node_name (cs
->callee
), (HOST_WIDE_INT
) cs
->count
);
1897 /* After a specialized NEW_NODE version of ORIG_NODE has been created, update
1898 their profile information to reflect this. */
1901 update_profiling_info (struct cgraph_node
*orig_node
,
1902 struct cgraph_node
*new_node
)
1904 struct cgraph_edge
*cs
;
1905 struct caller_statistics stats
;
1906 gcov_type new_sum
, orig_sum
;
1907 gcov_type remainder
, orig_node_count
= orig_node
->count
;
1909 if (orig_node_count
== 0)
1912 init_caller_stats (&stats
);
1913 cgraph_for_node_and_aliases (orig_node
, gather_caller_stats
, &stats
, false);
1914 orig_sum
= stats
.count_sum
;
1915 init_caller_stats (&stats
);
1916 cgraph_for_node_and_aliases (new_node
, gather_caller_stats
, &stats
, false);
1917 new_sum
= stats
.count_sum
;
1919 if (orig_node_count
< orig_sum
+ new_sum
)
1922 fprintf (dump_file
, " Problem: node %s/%i has too low count "
1923 HOST_WIDE_INT_PRINT_DEC
" while the sum of incoming "
1924 "counts is " HOST_WIDE_INT_PRINT_DEC
"\n",
1925 cgraph_node_name (orig_node
), orig_node
->uid
,
1926 (HOST_WIDE_INT
) orig_node_count
,
1927 (HOST_WIDE_INT
) (orig_sum
+ new_sum
));
1929 orig_node_count
= (orig_sum
+ new_sum
) * 12 / 10;
1931 fprintf (dump_file
, " proceeding by pretending it was "
1932 HOST_WIDE_INT_PRINT_DEC
"\n",
1933 (HOST_WIDE_INT
) orig_node_count
);
1936 new_node
->count
= new_sum
;
1937 remainder
= orig_node_count
- new_sum
;
1938 orig_node
->count
= remainder
;
1940 for (cs
= new_node
->callees
; cs
; cs
= cs
->next_callee
)
1942 cs
->count
= cs
->count
* (new_sum
* REG_BR_PROB_BASE
1943 / orig_node_count
) / REG_BR_PROB_BASE
;
1947 for (cs
= orig_node
->callees
; cs
; cs
= cs
->next_callee
)
1948 cs
->count
= cs
->count
* (remainder
* REG_BR_PROB_BASE
1949 / orig_node_count
) / REG_BR_PROB_BASE
;
1952 dump_profile_updates (orig_node
, new_node
);
1955 /* Update the respective profile of specialized NEW_NODE and the original
1956 ORIG_NODE after additional edges with cumulative count sum REDIRECTED_SUM
1957 have been redirected to the specialized version. */
1960 update_specialized_profile (struct cgraph_node
*new_node
,
1961 struct cgraph_node
*orig_node
,
1962 gcov_type redirected_sum
)
1964 struct cgraph_edge
*cs
;
1965 gcov_type new_node_count
, orig_node_count
= orig_node
->count
;
1968 fprintf (dump_file
, " the sum of counts of redirected edges is "
1969 HOST_WIDE_INT_PRINT_DEC
"\n", (HOST_WIDE_INT
) redirected_sum
);
1970 if (orig_node_count
== 0)
1973 gcc_assert (orig_node_count
>= redirected_sum
);
1975 new_node_count
= new_node
->count
;
1976 new_node
->count
+= redirected_sum
;
1977 orig_node
->count
-= redirected_sum
;
1979 for (cs
= new_node
->callees
; cs
; cs
= cs
->next_callee
)
1981 cs
->count
+= cs
->count
* redirected_sum
/ new_node_count
;
1985 for (cs
= orig_node
->callees
; cs
; cs
= cs
->next_callee
)
1987 gcov_type dec
= cs
->count
* (redirected_sum
* REG_BR_PROB_BASE
1988 / orig_node_count
) / REG_BR_PROB_BASE
;
1989 if (dec
< cs
->count
)
1996 dump_profile_updates (orig_node
, new_node
);
1999 /* Create a specialized version of NODE with known constants and types of
2000 parameters in KNOWN_VALS and redirect all edges in CALLERS to it. */
2002 static struct cgraph_node
*
2003 create_specialized_node (struct cgraph_node
*node
,
2004 VEC (tree
, heap
) *known_vals
,
2005 VEC (cgraph_edge_p
,heap
) *callers
)
2007 struct ipa_node_params
*new_info
, *info
= IPA_NODE_REF (node
);
2008 VEC (ipa_replace_map_p
,gc
)* replace_trees
= NULL
;
2009 struct cgraph_node
*new_node
;
2010 int i
, count
= ipa_get_param_count (info
);
2011 bitmap args_to_skip
;
2013 gcc_assert (!info
->ipcp_orig_node
);
2015 if (node
->local
.can_change_signature
)
2017 args_to_skip
= BITMAP_GGC_ALLOC ();
2018 for (i
= 0; i
< count
; i
++)
2020 tree t
= VEC_index (tree
, known_vals
, i
);
2022 if ((t
&& TREE_CODE (t
) != TREE_BINFO
)
2023 || !ipa_is_param_used (info
, i
))
2024 bitmap_set_bit (args_to_skip
, i
);
2029 args_to_skip
= NULL
;
2030 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2031 fprintf (dump_file
, " cannot change function signature\n");
2034 for (i
= 0; i
< count
; i
++)
2036 tree t
= VEC_index (tree
, known_vals
, i
);
2037 if (t
&& TREE_CODE (t
) != TREE_BINFO
)
2039 struct ipa_replace_map
*replace_map
;
2041 replace_map
= get_replacement_map (t
, ipa_get_param (info
, i
));
2043 VEC_safe_push (ipa_replace_map_p
, gc
, replace_trees
, replace_map
);
2047 new_node
= cgraph_create_virtual_clone (node
, callers
, replace_trees
,
2048 args_to_skip
, "constprop");
2049 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2050 fprintf (dump_file
, " the new node is %s/%i.\n",
2051 cgraph_node_name (new_node
), new_node
->uid
);
2052 gcc_checking_assert (ipa_node_params_vector
2053 && (VEC_length (ipa_node_params_t
,
2054 ipa_node_params_vector
)
2055 > (unsigned) cgraph_max_uid
));
2056 update_profiling_info (node
, new_node
);
2057 new_info
= IPA_NODE_REF (new_node
);
2058 new_info
->ipcp_orig_node
= node
;
2059 new_info
->known_vals
= known_vals
;
2061 ipcp_discover_new_direct_edges (new_node
, known_vals
);
2063 VEC_free (cgraph_edge_p
, heap
, callers
);
/* Given a NODE, and a subset of its CALLERS, try to populate blank slots in
   KNOWN_VALS with constants and types that are also known for all of the
   CALLERS.  */
2072 find_more_values_for_callers_subset (struct cgraph_node
*node
,
2073 VEC (tree
, heap
) *known_vals
,
2074 VEC (cgraph_edge_p
,heap
) *callers
)
2076 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2077 int i
, count
= ipa_get_param_count (info
);
2079 for (i
= 0; i
< count
; i
++)
2081 struct cgraph_edge
*cs
;
2082 tree newval
= NULL_TREE
;
2085 if (ipa_get_lattice (info
, i
)->bottom
2086 || VEC_index (tree
, known_vals
, i
))
2089 FOR_EACH_VEC_ELT (cgraph_edge_p
, callers
, j
, cs
)
2091 struct ipa_jump_func
*jump_func
;
2094 if (i
>= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
)))
2099 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
2100 t
= ipa_value_from_jfunc (IPA_NODE_REF (cs
->caller
), jump_func
);
2103 && !values_equal_for_ipcp_p (t
, newval
)))
2114 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2116 fprintf (dump_file
, " adding an extra known value ");
2117 print_ipcp_constant_value (dump_file
, newval
);
2118 fprintf (dump_file
, " for parameter ");
2119 print_generic_expr (dump_file
, ipa_get_param (info
, i
), 0);
2120 fprintf (dump_file
, "\n");
2123 VEC_replace (tree
, known_vals
, i
, newval
);
/* Given an original NODE and a VAL for which we have already created a
   specialized clone, look whether there are incoming edges that still lead
   into the old node but now also bring the requested value and also conform to
   all other criteria such that they can be redirected to the special node.
   This function can therefore redirect the final edge in an SCC.  */
2135 perhaps_add_new_callers (struct cgraph_node
*node
, struct ipcp_value
*val
)
2137 struct ipa_node_params
*dest_info
= IPA_NODE_REF (val
->spec_node
);
2138 struct ipcp_value_source
*src
;
2139 int count
= ipa_get_param_count (dest_info
);
2140 gcov_type redirected_sum
= 0;
2142 for (src
= val
->sources
; src
; src
= src
->next
)
2144 struct cgraph_edge
*cs
= src
->cs
;
2147 enum availability availability
;
2148 bool insufficient
= false;
2150 if (cgraph_function_node (cs
->callee
, &availability
) == node
2151 && availability
> AVAIL_OVERWRITABLE
2152 && cgraph_edge_brings_value_p (cs
, src
))
2154 struct ipa_node_params
*caller_info
;
2155 struct ipa_edge_args
*args
;
2158 caller_info
= IPA_NODE_REF (cs
->caller
);
2159 args
= IPA_EDGE_REF (cs
);
2160 for (i
= 0; i
< count
; i
++)
2162 struct ipa_jump_func
*jump_func
;
2165 val
= VEC_index (tree
, dest_info
->known_vals
, i
);
2169 if (i
>= ipa_get_cs_argument_count (args
))
2171 insufficient
= true;
2174 jump_func
= ipa_get_ith_jump_func (args
, i
);
2175 t
= ipa_value_from_jfunc (caller_info
, jump_func
);
2176 if (!t
|| !values_equal_for_ipcp_p (val
, t
))
2178 insufficient
= true;
2186 fprintf (dump_file
, " - adding an extra caller %s/%i"
2188 xstrdup (cgraph_node_name (cs
->caller
)),
2190 xstrdup (cgraph_node_name (val
->spec_node
)),
2191 val
->spec_node
->uid
);
2193 cgraph_redirect_edge_callee (cs
, val
->spec_node
);
2194 redirected_sum
+= cs
->count
;
2197 cs
= get_next_cgraph_edge_clone (cs
);
2202 update_specialized_profile (val
->spec_node
, node
, redirected_sum
);
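/* Note that the walk above also follows edges created by cloning (via
   get_next_cgraph_edge_clone), so in particular the recursive call edge
   inside a freshly created clone of a self-recursive function can be turned
   around to call the clone itself -- the "final edge in an SCC" mentioned in
   the comment above.  */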
/* Copy KNOWN_BINFOS to KNOWN_VALS.  */

static void
move_binfos_to_values (VEC (tree, heap) *known_vals,
                       VEC (tree, heap) *known_binfos)
{
  tree t;
  int i;

  for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
    if (t)
      VEC_replace (tree, known_vals, i, t);
}
/* Decide whether and what specialized clones of NODE should be created.  */

static bool
decide_whether_version_node (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int i, count = ipa_get_param_count (info);
  VEC (tree, heap) *known_csts, *known_binfos;
  bool ret = false;

  if (count == 0)
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEvaluating opportunities for %s/%i.\n",
             cgraph_node_name (node), node->uid);

  gather_context_independent_values (info, &known_csts, &known_binfos,
                                     NULL);

  for (i = 0; i < count; i++)
    {
      struct ipcp_lattice *lat = ipa_get_lattice (info, i);
      struct ipcp_value *val;

      if (lat->bottom
          || VEC_index (tree, known_csts, i)
          || VEC_index (tree, known_binfos, i))
        continue;

      for (val = lat->values; val; val = val->next)
        {
          int freq_sum, caller_count;
          gcov_type count_sum;
          VEC (cgraph_edge_p, heap) *callers;
          VEC (tree, heap) *kv;

          if (val->spec_node)
            {
              perhaps_add_new_callers (node, val);
              continue;
            }
          else if (val->local_size_cost + overall_size > max_new_size)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "   Ignoring candidate value because "
                         "max_new_size would be reached with %li.\n",
                         val->local_size_cost + overall_size);
              continue;
            }
          else if (!get_info_about_necessary_edges (val, &freq_sum, &count_sum,
                                                    &caller_count))
            continue;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " - considering value ");
              print_ipcp_constant_value (dump_file, val->value);
              fprintf (dump_file, " for parameter ");
              print_generic_expr (dump_file, ipa_get_param (info, i), 0);
              fprintf (dump_file, " (caller_count: %i)\n", caller_count);
            }

          if (!good_cloning_opportunity_p (node, val->local_time_benefit,
                                           freq_sum, count_sum,
                                           val->local_size_cost)
              && !good_cloning_opportunity_p (node,
                                              val->local_time_benefit
                                              + val->prop_time_benefit,
                                              freq_sum, count_sum,
                                              val->local_size_cost
                                              + val->prop_size_cost))
            continue;

          if (dump_file)
            fprintf (dump_file, "  Creating a specialized node of %s/%i.\n",
                     cgraph_node_name (node), node->uid);

          callers = gather_edges_for_value (val, caller_count);
          kv = VEC_copy (tree, heap, known_csts);
          move_binfos_to_values (kv, known_binfos);
          VEC_replace (tree, kv, i, val->value);
          find_more_values_for_callers_subset (node, kv, callers);
          val->spec_node = create_specialized_node (node, kv, callers);
          overall_size += val->local_size_cost;
          info = IPA_NODE_REF (node);
          ret = true;

          /* TODO: If for some lattice there is only one other known value
             left, make a special node for it too.  */
          VEC_replace (tree, kv, i, val->value);
        }
    }

  if (info->clone_for_all_contexts)
    {
      VEC (cgraph_edge_p, heap) *callers;

      if (dump_file)
        fprintf (dump_file, " - Creating a specialized node of %s/%i "
                 "for all known contexts.\n", cgraph_node_name (node),
                 node->uid);

      callers = collect_callers_of_node (node);
      move_binfos_to_values (known_csts, known_binfos);
      create_specialized_node (node, known_csts, callers);
      info = IPA_NODE_REF (node);
      info->clone_for_all_contexts = false;
      ret = true;
    }
  else
    VEC_free (tree, heap, known_csts);

  VEC_free (tree, heap, known_binfos);
  return ret;
}
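/* Note on the profitability test in the loop above: a value is cloned for if
   it passes either of two checks -- one using only the benefit and size cost
   estimated for NODE itself (local_time_benefit / local_size_cost), and a
   more optimistic one that also adds the effects expected to propagate into
   callees (prop_time_benefit / prop_size_cost).  Schematically:

     good (local_time, local_size)
     || good (local_time + prop_time, local_size + prop_size)

   where good () stands for good_cloning_opportunity_p with the caller
   frequency and profile count sums passed along unchanged.  */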
/* Transitively mark all callees of NODE within the same SCC as not dead.  */

static void
spread_undeadness (struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    if (edge_within_scc (cs))
      {
        struct cgraph_node *callee;
        struct ipa_node_params *info;

        callee = cgraph_function_node (cs->callee, NULL);
        info = IPA_NODE_REF (callee);

        if (info->node_dead)
          {
            info->node_dead = 0;
            spread_undeadness (callee);
          }
      }
}
/* Return true if NODE has a caller from outside of its SCC that is not
   dead.  Worker callback for cgraph_for_node_and_aliases.  */

static bool
has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
                                      void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cgraph_for_node_and_aliases (cs->caller,
                                        has_undead_caller_from_outside_scc_p,
                                        NULL, true))
      return true;
    else if (!edge_within_scc (cs)
             && !IPA_NODE_REF (cs->caller)->node_dead)
      return true;
  return false;
}
/* Identify nodes within the same SCC as NODE which are no longer needed
   because of new clones and will be removed as unreachable.  */

static void
identify_dead_nodes (struct cgraph_node *node)
{
  struct cgraph_node *v;
  for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
    if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
        && !cgraph_for_node_and_aliases (v,
                                         has_undead_caller_from_outside_scc_p,
                                         NULL, true))
      IPA_NODE_REF (v)->node_dead = 1;

  for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
    if (!IPA_NODE_REF (v)->node_dead)
      spread_undeadness (v);

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
      if (IPA_NODE_REF (v)->node_dead)
        fprintf (dump_file, "  Marking node as dead: %s/%i.\n",
                 cgraph_node_name (v), v->uid);
}
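/* The deadness computation above works in two sweeps over the SCC: first,
   every member that could be removed if it lost all direct calls and has no
   live caller outside the SCC is tentatively marked dead; then
   spread_undeadness clears the mark for everything reachable from a member
   that stayed live.  Nodes still marked dead afterwards will be removed as
   unreachable once their callers have been redirected to clones.  */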
/* The decision stage.  Iterate over the topological order of call graph nodes
   TOPO and make specialized clones if deemed beneficial.  */

static void
ipcp_decision_stage (struct topo_info *topo)
{
  int i;

  if (dump_file)
    fprintf (dump_file, "\nIPA decision stage:\n\n");

  for (i = topo->nnodes - 1; i >= 0; i--)
    {
      struct cgraph_node *node = topo->order[i];
      bool change = false, iterate = true;

      while (iterate)
        {
          struct cgraph_node *v;
          iterate = false;
          for (v = node; v; v = ((struct ipa_dfs_info *) v->symbol.aux)->next_cycle)
            if (cgraph_function_with_gimple_body_p (v)
                && ipcp_versionable_function_p (v))
              iterate |= decide_whether_version_node (v);
          change |= iterate;
        }

      if (change)
        identify_dead_nodes (node);
    }
}
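/* The inner while loop keeps re-running decide_whether_version_node over the
   members of an SCC until no call produces a new clone, because creating one
   clone may redirect edges (see perhaps_add_new_callers) and thereby expose
   further specialization opportunities elsewhere in the same cycle.  */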
/* The IPCP driver.  */

static unsigned int
ipcp_driver (void)
{
  struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
  struct topo_info topo;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  grow_next_edge_clone_vector ();
  edge_duplication_hook_holder =
    cgraph_add_edge_duplication_hook (&ipcp_edge_duplication_hook, NULL);
  ipcp_values_pool = create_alloc_pool ("IPA-CP values",
                                        sizeof (struct ipcp_value), 32);
  ipcp_sources_pool = create_alloc_pool ("IPA-CP value sources",
                                         sizeof (struct ipcp_value_source), 64);
  if (dump_file)
    {
      fprintf (dump_file, "\nIPA structures before propagation:\n");
      if (dump_flags & TDF_DETAILS)
        ipa_print_all_params (dump_file);
      ipa_print_all_jump_functions (dump_file);
    }

  /* Topological sort.  */
  build_toporder_info (&topo);
  /* Do the interprocedural propagation.  */
  ipcp_propagate_stage (&topo);
  /* Decide what constant propagation and cloning should be performed.  */
  ipcp_decision_stage (&topo);

  /* Free all IPCP structures.  */
  free_toporder_info (&topo);
  VEC_free (cgraph_edge_p, heap, next_edge_clone);
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  ipa_free_all_structures_after_ipa_cp ();
  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation end\n");
  return 0;
}
/* Initialization and computation of IPCP data structures.  This is the
   initial intraprocedural analysis of functions, which gathers information
   to be propagated later on.  */

static void
ipcp_generate_summary (void)
{
  struct cgraph_node *node;

  if (dump_file)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      node->local.versionable
        = tree_versionable_function_p (node->symbol.decl);
      ipa_analyze_node (node);
    }
}
/* Write ipcp summary.  */

static void
ipcp_write_summary (void)
{
  ipa_prop_write_jump_functions ();
}
/* Read ipcp summary.  */

static void
ipcp_read_summary (void)
{
  ipa_prop_read_jump_functions ();
}
/* Gate for IPCP optimization.  */

static bool
cgraph_gate_cp (void)
{
  /* FIXME: We should remove the optimize check after we ensure we never run
     IPA passes when not optimizing.  */
  return flag_ipa_cp && optimize;
}
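/* In practice this means the pass only runs when -fipa-cp is in effect and
   some optimization level is enabled; -fno-ipa-cp disables it.  (General
   note about the flag, not a statement about default -O settings.)  */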
struct ipa_opt_pass_d pass_ipa_cp =
{
 {
  IPA_PASS,
  "cp",                         /* name */
  cgraph_gate_cp,               /* gate */
  ipcp_driver,                  /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_IPA_CONSTANT_PROP,         /* tv_id */
  0,                            /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_remove_functions | TODO_ggc_collect /* todo_flags_finish */
 },
 ipcp_generate_summary,         /* generate_summary */
 ipcp_write_summary,            /* write_summary */
 ipcp_read_summary,             /* read_summary */
 NULL,                          /* write_optimization_summary */
 NULL,                          /* read_optimization_summary */
 NULL,                          /* stmt_fixup */
 0,                             /* TODOs */
 NULL,                          /* function_transform */
 NULL,                          /* variable_transform */