1 /* Interprocedural analyses.
2 Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "langhooks.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-inline.h"
37 #include "diagnostic.h"
38 #include "tree-pretty-print.h"
39 #include "gimple-pretty-print.h"
40 #include "lto-streamer.h"
41 #include "data-streamer.h"
42 #include "tree-streamer.h"
45 /* Intermediate information about a parameter that is only useful during the
46 run of ipa_analyze_node and is not kept afterwards. */
48 struct param_analysis_info
51 bitmap visited_statements
;
54 /* Vector where the parameter infos are actually stored. */
55 VEC (ipa_node_params_t
, heap
) *ipa_node_params_vector
;
56 /* Vector where the parameter infos are actually stored. */
57 VEC (ipa_edge_args_t
, gc
) *ipa_edge_args_vector
;
59 /* Holders of ipa cgraph hooks: */
60 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
61 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
62 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
63 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
64 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
66 /* Return index of the formal whose tree is PTREE in function which corresponds
70 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
74 count
= ipa_get_param_count (info
);
75 for (i
= 0; i
< count
; i
++)
76 if (ipa_get_param (info
, i
) == ptree
)
82 /* Populate the param_decl field in parameter descriptors of INFO that
83 corresponds to NODE. */
86 ipa_populate_param_decls (struct cgraph_node
*node
,
87 struct ipa_node_params
*info
)
95 fnargs
= DECL_ARGUMENTS (fndecl
);
97 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
99 VEC_index (ipa_param_descriptor_t
,
100 info
->descriptors
, param_num
)->decl
= parm
;
105 /* Return how many formal parameters FNDECL has. */
108 count_formal_params (tree fndecl
)
113 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
119 /* Initialize the ipa_node_params structure associated with NODE by counting
120 the function parameters, creating the descriptors and populating their
124 ipa_initialize_node_params (struct cgraph_node
*node
)
126 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
128 if (!info
->descriptors
)
132 param_count
= count_formal_params (node
->decl
);
135 VEC_safe_grow_cleared (ipa_param_descriptor_t
, heap
,
136 info
->descriptors
, param_count
);
137 ipa_populate_param_decls (node
, info
);
142 /* Print the jump functions associated with call graph edge CS to file F. */
145 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
149 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
150 for (i
= 0; i
< count
; i
++)
152 struct ipa_jump_func
*jump_func
;
153 enum jump_func_type type
;
155 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
156 type
= jump_func
->type
;
158 fprintf (f
, " param %d: ", i
);
159 if (type
== IPA_JF_UNKNOWN
)
160 fprintf (f
, "UNKNOWN\n");
161 else if (type
== IPA_JF_KNOWN_TYPE
)
163 fprintf (f
, "KNOWN TYPE: base ");
164 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
165 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
166 jump_func
->value
.known_type
.offset
);
167 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
170 else if (type
== IPA_JF_CONST
)
172 tree val
= jump_func
->value
.constant
;
173 fprintf (f
, "CONST: ");
174 print_generic_expr (f
, val
, 0);
175 if (TREE_CODE (val
) == ADDR_EXPR
176 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
179 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
184 else if (type
== IPA_JF_CONST_MEMBER_PTR
)
186 fprintf (f
, "CONST MEMBER PTR: ");
187 print_generic_expr (f
, jump_func
->value
.member_cst
.pfn
, 0);
189 print_generic_expr (f
, jump_func
->value
.member_cst
.delta
, 0);
192 else if (type
== IPA_JF_PASS_THROUGH
)
194 fprintf (f
, "PASS THROUGH: ");
195 fprintf (f
, "%d, op %s ",
196 jump_func
->value
.pass_through
.formal_id
,
198 jump_func
->value
.pass_through
.operation
]);
199 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
200 print_generic_expr (f
,
201 jump_func
->value
.pass_through
.operand
, 0);
204 else if (type
== IPA_JF_ANCESTOR
)
206 fprintf (f
, "ANCESTOR: ");
207 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
208 jump_func
->value
.ancestor
.formal_id
,
209 jump_func
->value
.ancestor
.offset
);
210 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
217 /* Print the jump functions of all arguments on all call graph edges going from
221 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
223 struct cgraph_edge
*cs
;
226 fprintf (f
, " Jump functions of caller %s:\n", cgraph_node_name (node
));
227 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
229 if (!ipa_edge_args_info_available_for_edge_p (cs
))
232 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
233 cgraph_node_name (node
), node
->uid
,
234 cgraph_node_name (cs
->callee
), cs
->callee
->uid
);
235 ipa_print_node_jump_functions_for_edge (f
, cs
);
238 for (cs
= node
->indirect_calls
, i
= 0; cs
; cs
= cs
->next_callee
, i
++)
240 if (!ipa_edge_args_info_available_for_edge_p (cs
))
245 fprintf (f
, " indirect callsite %d for stmt ", i
);
246 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
249 fprintf (f
, " indirect callsite %d :\n", i
);
250 ipa_print_node_jump_functions_for_edge (f
, cs
);
255 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
258 ipa_print_all_jump_functions (FILE *f
)
260 struct cgraph_node
*node
;
262 fprintf (f
, "\nJump functions:\n");
263 for (node
= cgraph_nodes
; node
; node
= node
->next
)
265 ipa_print_node_jump_functions (f
, node
);
269 /* Structure to be passed in between detect_type_change and
270 check_stmt_for_type_change. */
272 struct type_change_info
274 /* Offset into the object where there is the virtual method pointer we are
276 HOST_WIDE_INT offset
;
277 /* The declaration or SSA_NAME pointer of the base that we are checking for
280 /* If we actually can tell the type that the object has changed to, it is
281 stored in this field. Otherwise it remains NULL_TREE. */
282 tree known_current_type
;
283 /* Set to true if dynamic type change has been detected. */
284 bool type_maybe_changed
;
285 /* Set to true if multiple types have been encountered. known_current_type
286 must be disregarded in that case. */
287 bool multiple_types_encountered
;
290 /* Return true if STMT can modify a virtual method table pointer.
292 This function makes special assumptions about both constructors and
293 destructors which are all the functions that are allowed to alter the VMT
294 pointers. It assumes that destructors begin with assignment into all VMT
295 pointers and that constructors essentially look in the following way:
297 1) The very first thing they do is that they call constructors of ancestor
298 sub-objects that have them.
300 2) Then VMT pointers of this and all its ancestors is set to new values
301 corresponding to the type corresponding to the constructor.
303 3) Only afterwards, other stuff such as constructor of member sub-objects
304 and the code written by the user is run. Only this may include calling
305 virtual functions, directly or indirectly.
307 There is no way to call a constructor of an ancestor sub-object in any
310 This means that we do not have to care whether constructors get the correct
311 type information because they will always change it (in fact, if we define
312 the type to be given by the VMT pointer, it is undefined).
314 The most important fact to derive from the above is that if, for some
315 statement in the section 3, we try to detect whether the dynamic type has
316 changed, we can safely ignore all calls as we examine the function body
317 backwards until we reach statements in section 2 because these calls cannot
318 be ancestor constructors or destructors (if the input is not bogus) and so
319 do not change the dynamic type (this holds true only for automatically
320 allocated objects but at the moment we devirtualize only these). We then
321 must detect that statements in section 2 change the dynamic type and can try
322 to derive the new type. That is enough and we can stop, we will never see
323 the calls into constructors of sub-objects in this code. Therefore we can
324 safely ignore all call statements that we traverse.
328 stmt_may_be_vtbl_ptr_store (gimple stmt
)
330 if (is_gimple_call (stmt
))
332 else if (is_gimple_assign (stmt
))
334 tree lhs
= gimple_assign_lhs (stmt
);
336 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
338 if (flag_strict_aliasing
339 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
342 if (TREE_CODE (lhs
) == COMPONENT_REF
343 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
345 /* In the future we might want to use get_base_ref_and_offset to find
346 if there is a field corresponding to the offset and if so, proceed
347 almost like if it was a component ref. */
353 /* If STMT can be proved to be an assignment to the virtual method table
354 pointer of ANALYZED_OBJ and the type associated with the new table
355 identified, return the type. Otherwise return NULL_TREE. */
358 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
360 HOST_WIDE_INT offset
, size
, max_size
;
363 if (!gimple_assign_single_p (stmt
))
366 lhs
= gimple_assign_lhs (stmt
);
367 rhs
= gimple_assign_rhs1 (stmt
);
368 if (TREE_CODE (lhs
) != COMPONENT_REF
369 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1))
370 || TREE_CODE (rhs
) != ADDR_EXPR
)
372 rhs
= get_base_address (TREE_OPERAND (rhs
, 0));
374 || TREE_CODE (rhs
) != VAR_DECL
375 || !DECL_VIRTUAL_P (rhs
))
378 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
379 if (offset
!= tci
->offset
380 || size
!= POINTER_SIZE
381 || max_size
!= POINTER_SIZE
)
383 if (TREE_CODE (base
) == MEM_REF
)
385 if (TREE_CODE (tci
->object
) != MEM_REF
386 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
387 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
388 TREE_OPERAND (base
, 1)))
391 else if (tci
->object
!= base
)
394 return DECL_CONTEXT (rhs
);
397 /* Callback of walk_aliased_vdefs and a helper function for
398 detect_type_change to check whether a particular statement may modify
399 the virtual table pointer, and if possible also determine the new type of
400 the (sub-)object. It stores its result into DATA, which points to a
401 type_change_info structure. */
404 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
406 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
407 struct type_change_info
*tci
= (struct type_change_info
*) data
;
409 if (stmt_may_be_vtbl_ptr_store (stmt
))
412 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
413 if (tci
->type_maybe_changed
414 && type
!= tci
->known_current_type
)
415 tci
->multiple_types_encountered
= true;
416 tci
->known_current_type
= type
;
417 tci
->type_maybe_changed
= true;
426 /* Like detect_type_change but with extra argument COMP_TYPE which will become
427 the component type part of new JFUNC of dynamic type change is detected and
428 the new base type is identified. */
431 detect_type_change_1 (tree arg
, tree base
, tree comp_type
, gimple call
,
432 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
434 struct type_change_info tci
;
437 gcc_checking_assert (DECL_P (arg
)
438 || TREE_CODE (arg
) == MEM_REF
439 || handled_component_p (arg
));
440 /* Const calls cannot call virtual methods through VMT and so type changes do
442 if (!flag_devirtualize
|| !gimple_vuse (call
))
448 ao
.size
= POINTER_SIZE
;
449 ao
.max_size
= ao
.size
;
450 ao
.ref_alias_set
= -1;
451 ao
.base_alias_set
= -1;
454 tci
.object
= get_base_address (arg
);
455 tci
.known_current_type
= NULL_TREE
;
456 tci
.type_maybe_changed
= false;
457 tci
.multiple_types_encountered
= false;
459 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
461 if (!tci
.type_maybe_changed
)
464 if (!tci
.known_current_type
465 || tci
.multiple_types_encountered
467 jfunc
->type
= IPA_JF_UNKNOWN
;
470 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
471 jfunc
->value
.known_type
.base_type
= tci
.known_current_type
;
472 jfunc
->value
.known_type
.component_type
= comp_type
;
478 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
479 looking for assignments to its virtual table pointer. If it is, return true
480 and fill in the jump function JFUNC with relevant type information or set it
481 to unknown. ARG is the object itself (not a pointer to it, unless
482 dereferenced). BASE is the base of the memory access as returned by
483 get_ref_base_and_extent, as is the offset. */
486 detect_type_change (tree arg
, tree base
, gimple call
,
487 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
489 return detect_type_change_1 (arg
, base
, TREE_TYPE (arg
), call
, jfunc
, offset
);
492 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
493 SSA name (its dereference will become the base and the offset is assumed to
497 detect_type_change_ssa (tree arg
, gimple call
, struct ipa_jump_func
*jfunc
)
501 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
502 if (!flag_devirtualize
503 || !POINTER_TYPE_P (TREE_TYPE (arg
))
504 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != RECORD_TYPE
)
507 comp_type
= TREE_TYPE (TREE_TYPE (arg
));
508 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
509 build_int_cst (ptr_type_node
, 0));
511 return detect_type_change_1 (arg
, arg
, comp_type
, call
, jfunc
, 0);
514 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
515 boolean variable pointed to by DATA. */
518 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
521 bool *b
= (bool *) data
;
526 /* Return true if the formal parameter PARM might have been modified in this
527 function before reaching the statement STMT. PARM_AINFO is a pointer to a
528 structure containing temporary information about PARM. */
531 is_parm_modified_before_stmt (struct param_analysis_info
*parm_ainfo
,
532 gimple stmt
, tree parm
)
534 bool modified
= false;
537 if (parm_ainfo
->modified
)
540 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
541 ao_ref_init (&refd
, parm
);
542 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
543 &modified
, &parm_ainfo
->visited_statements
);
546 parm_ainfo
->modified
= true;
552 /* If STMT is an assignment that loads a value from an parameter declaration,
553 return the index of the parameter in ipa_node_params which has not been
554 modified. Otherwise return -1. */
557 load_from_unmodified_param (struct ipa_node_params
*info
,
558 struct param_analysis_info
*parms_ainfo
,
564 if (!gimple_assign_single_p (stmt
))
567 op1
= gimple_assign_rhs1 (stmt
);
568 if (TREE_CODE (op1
) != PARM_DECL
)
571 index
= ipa_get_param_decl_index (info
, op1
);
573 || is_parm_modified_before_stmt (&parms_ainfo
[index
], stmt
, op1
))
579 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
580 of an assignment statement STMT, try to determine whether we are actually
581 handling any of the following cases and construct an appropriate jump
582 function into JFUNC if so:
584 1) The passed value is loaded from a formal parameter which is not a gimple
585 register (most probably because it is addressable, the value has to be
586 scalar) and we can guarantee the value has not changed. This case can
587 therefore be described by a simple pass-through jump function. For example:
596 2) The passed value can be described by a simple arithmetic pass-through
603 D.2064_4 = a.1(D) + 4;
606 This case can also occur in combination of the previous one, e.g.:
614 D.2064_4 = a.0_3 + 4;
617 3) The passed value is an address of an object within another one (which
618 also passed by reference). Such situations are described by an ancestor
619 jump function and describe situations such as:
621 B::foo() (struct B * const this)
625 D.1845_2 = &this_1(D)->D.1748;
628 INFO is the structure describing individual parameters access different
629 stages of IPA optimizations. PARMS_AINFO contains the information that is
630 only needed for intraprocedural analysis. */
633 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
634 struct param_analysis_info
*parms_ainfo
,
635 struct ipa_jump_func
*jfunc
,
636 gimple call
, gimple stmt
, tree name
)
638 HOST_WIDE_INT offset
, size
, max_size
;
639 tree op1
, tc_ssa
, base
, ssa
;
642 op1
= gimple_assign_rhs1 (stmt
);
644 if (TREE_CODE (op1
) == SSA_NAME
)
646 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
647 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
649 index
= load_from_unmodified_param (info
, parms_ainfo
,
650 SSA_NAME_DEF_STMT (op1
));
655 index
= load_from_unmodified_param (info
, parms_ainfo
, stmt
);
656 tc_ssa
= gimple_assign_lhs (stmt
);
661 tree op2
= gimple_assign_rhs2 (stmt
);
665 if (!is_gimple_ip_invariant (op2
)
666 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
667 && !useless_type_conversion_p (TREE_TYPE (name
),
671 jfunc
->type
= IPA_JF_PASS_THROUGH
;
672 jfunc
->value
.pass_through
.formal_id
= index
;
673 jfunc
->value
.pass_through
.operation
= gimple_assign_rhs_code (stmt
);
674 jfunc
->value
.pass_through
.operand
= op2
;
676 else if (gimple_assign_single_p (stmt
)
677 && !detect_type_change_ssa (tc_ssa
, call
, jfunc
))
679 jfunc
->type
= IPA_JF_PASS_THROUGH
;
680 jfunc
->value
.pass_through
.formal_id
= index
;
681 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
686 if (TREE_CODE (op1
) != ADDR_EXPR
)
688 op1
= TREE_OPERAND (op1
, 0);
689 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
691 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
692 if (TREE_CODE (base
) != MEM_REF
693 /* If this is a varying address, punt. */
697 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
698 ssa
= TREE_OPERAND (base
, 0);
699 if (TREE_CODE (ssa
) != SSA_NAME
700 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
704 /* Dynamic types are changed only in constructors and destructors and */
705 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
707 && !detect_type_change (op1
, base
, call
, jfunc
, offset
))
709 jfunc
->type
= IPA_JF_ANCESTOR
;
710 jfunc
->value
.ancestor
.formal_id
= index
;
711 jfunc
->value
.ancestor
.offset
= offset
;
712 jfunc
->value
.ancestor
.type
= TREE_TYPE (op1
);
716 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
719 iftmp.1_3 = &obj_2(D)->D.1762;
721 The base of the MEM_REF must be a default definition SSA NAME of a
722 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
723 whole MEM_REF expression is returned and the offset calculated from any
724 handled components and the MEM_REF itself is stored into *OFFSET. The whole
725 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
728 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
730 HOST_WIDE_INT size
, max_size
;
731 tree expr
, parm
, obj
;
733 if (!gimple_assign_single_p (assign
))
735 expr
= gimple_assign_rhs1 (assign
);
737 if (TREE_CODE (expr
) != ADDR_EXPR
)
739 expr
= TREE_OPERAND (expr
, 0);
741 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
743 if (TREE_CODE (expr
) != MEM_REF
744 /* If this is a varying address, punt. */
749 parm
= TREE_OPERAND (expr
, 0);
750 if (TREE_CODE (parm
) != SSA_NAME
751 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
752 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
755 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
761 /* Given that an actual argument is an SSA_NAME that is a result of a phi
762 statement PHI, try to find out whether NAME is in fact a
763 multiple-inheritance typecast from a descendant into an ancestor of a formal
764 parameter and thus can be described by an ancestor jump function and if so,
765 write the appropriate function into JFUNC.
767 Essentially we want to match the following pattern:
775 iftmp.1_3 = &obj_2(D)->D.1762;
778 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
779 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
783 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
784 struct ipa_jump_func
*jfunc
,
785 gimple call
, gimple phi
)
787 HOST_WIDE_INT offset
;
789 basic_block phi_bb
, assign_bb
, cond_bb
;
790 tree tmp
, parm
, expr
, obj
;
793 if (gimple_phi_num_args (phi
) != 2)
796 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
797 tmp
= PHI_ARG_DEF (phi
, 0);
798 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
799 tmp
= PHI_ARG_DEF (phi
, 1);
802 if (TREE_CODE (tmp
) != SSA_NAME
803 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
804 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
805 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
808 assign
= SSA_NAME_DEF_STMT (tmp
);
809 assign_bb
= gimple_bb (assign
);
810 if (!single_pred_p (assign_bb
))
812 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
815 parm
= TREE_OPERAND (expr
, 0);
816 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
817 gcc_assert (index
>= 0);
819 cond_bb
= single_pred (assign_bb
);
820 cond
= last_stmt (cond_bb
);
822 || gimple_code (cond
) != GIMPLE_COND
823 || gimple_cond_code (cond
) != NE_EXPR
824 || gimple_cond_lhs (cond
) != parm
825 || !integer_zerop (gimple_cond_rhs (cond
)))
828 phi_bb
= gimple_bb (phi
);
829 for (i
= 0; i
< 2; i
++)
831 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
832 if (pred
!= assign_bb
&& pred
!= cond_bb
)
836 if (!detect_type_change (obj
, expr
, call
, jfunc
, offset
))
838 jfunc
->type
= IPA_JF_ANCESTOR
;
839 jfunc
->value
.ancestor
.formal_id
= index
;
840 jfunc
->value
.ancestor
.offset
= offset
;
841 jfunc
->value
.ancestor
.type
= TREE_TYPE (obj
);
845 /* Given OP which is passed as an actual argument to a called function,
846 determine if it is possible to construct a KNOWN_TYPE jump function for it
847 and if so, create one and store it to JFUNC. */
850 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
853 HOST_WIDE_INT offset
, size
, max_size
;
856 if (!flag_devirtualize
857 || TREE_CODE (op
) != ADDR_EXPR
858 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
)
861 op
= TREE_OPERAND (op
, 0);
862 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
866 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
867 || is_global_var (base
))
870 if (detect_type_change (op
, base
, call
, jfunc
, offset
)
871 || !TYPE_BINFO (TREE_TYPE (base
)))
874 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
875 jfunc
->value
.known_type
.base_type
= TREE_TYPE (base
);
876 jfunc
->value
.known_type
.offset
= offset
;
877 jfunc
->value
.known_type
.component_type
= TREE_TYPE (op
);
881 /* Determine the jump functions of scalar arguments. Scalar means SSA names
882 and constants of a number of selected types. INFO is the ipa_node_params
883 structure associated with the caller, PARMS_AINFO describes state of
884 analysis with respect to individual formal parameters. ARGS is the
885 ipa_edge_args structure describing the callsite CALL which is the call
886 statement being examined.*/
889 compute_scalar_jump_functions (struct ipa_node_params
*info
,
890 struct param_analysis_info
*parms_ainfo
,
891 struct ipa_edge_args
*args
,
897 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
899 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, num
);
900 arg
= gimple_call_arg (call
, num
);
902 if (is_gimple_ip_invariant (arg
))
904 jfunc
->type
= IPA_JF_CONST
;
905 jfunc
->value
.constant
= arg
;
907 else if (TREE_CODE (arg
) == SSA_NAME
)
909 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
911 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
914 && !detect_type_change_ssa (arg
, call
, jfunc
))
916 jfunc
->type
= IPA_JF_PASS_THROUGH
;
917 jfunc
->value
.pass_through
.formal_id
= index
;
918 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
923 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
924 if (is_gimple_assign (stmt
))
925 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
927 else if (gimple_code (stmt
) == GIMPLE_PHI
)
928 compute_complex_ancestor_jump_func (info
, jfunc
, call
, stmt
);
932 compute_known_type_jump_func (arg
, jfunc
, call
);
936 /* Inspect the given TYPE and return true iff it has the same structure (the
937 same number of fields of the same types) as a C++ member pointer. If
938 METHOD_PTR and DELTA are non-NULL, store the trees representing the
939 corresponding fields there. */
942 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
946 if (TREE_CODE (type
) != RECORD_TYPE
)
949 fld
= TYPE_FIELDS (type
);
950 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
951 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
)
957 fld
= DECL_CHAIN (fld
);
958 if (!fld
|| INTEGRAL_TYPE_P (fld
))
963 if (DECL_CHAIN (fld
))
969 /* Go through arguments of the CALL and for every one that looks like a member
970 pointer, check whether it can be safely declared pass-through and if so,
971 mark that to the corresponding item of jump FUNCTIONS. Return true iff
972 there are non-pass-through member pointers within the arguments. INFO
973 describes formal parameters of the caller. PARMS_INFO is a pointer to a
974 vector containing intermediate information about each formal parameter. */
977 compute_pass_through_member_ptrs (struct ipa_node_params
*info
,
978 struct param_analysis_info
*parms_ainfo
,
979 struct ipa_edge_args
*args
,
982 bool undecided_members
= false;
986 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
988 arg
= gimple_call_arg (call
, num
);
990 if (type_like_member_ptr_p (TREE_TYPE (arg
), NULL
, NULL
))
992 if (TREE_CODE (arg
) == PARM_DECL
)
994 int index
= ipa_get_param_decl_index (info
, arg
);
996 gcc_assert (index
>=0);
997 if (!is_parm_modified_before_stmt (&parms_ainfo
[index
], call
,
1000 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
,
1002 jfunc
->type
= IPA_JF_PASS_THROUGH
;
1003 jfunc
->value
.pass_through
.formal_id
= index
;
1004 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
1007 undecided_members
= true;
1010 undecided_members
= true;
1014 return undecided_members
;
1017 /* Simple function filling in a member pointer constant jump function (with PFN
1018 and DELTA as the constant value) into JFUNC. */
1021 fill_member_ptr_cst_jump_function (struct ipa_jump_func
*jfunc
,
1022 tree pfn
, tree delta
)
1024 jfunc
->type
= IPA_JF_CONST_MEMBER_PTR
;
1025 jfunc
->value
.member_cst
.pfn
= pfn
;
1026 jfunc
->value
.member_cst
.delta
= delta
;
1029 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1030 return the rhs of its defining statement. */
1033 get_ssa_def_if_simple_copy (tree rhs
)
1035 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1037 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1039 if (gimple_assign_single_p (def_stmt
))
1040 rhs
= gimple_assign_rhs1 (def_stmt
);
1047 /* Traverse statements from CALL backwards, scanning whether the argument ARG
1048 which is a member pointer is filled in with constant values. If it is, fill
1049 the jump function JFUNC in appropriately. METHOD_FIELD and DELTA_FIELD are
1050 fields of the record type of the member pointer. To give an example, we
1051 look for a pattern looking like the following:
1053 D.2515.__pfn ={v} printStuff;
1054 D.2515.__delta ={v} 0;
1055 i_1 = doprinting (D.2515); */
1058 determine_cst_member_ptr (gimple call
, tree arg
, tree method_field
,
1059 tree delta_field
, struct ipa_jump_func
*jfunc
)
1061 gimple_stmt_iterator gsi
;
1062 tree method
= NULL_TREE
;
1063 tree delta
= NULL_TREE
;
1065 gsi
= gsi_for_stmt (call
);
1068 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1070 gimple stmt
= gsi_stmt (gsi
);
1073 if (!stmt_may_clobber_ref_p (stmt
, arg
))
1075 if (!gimple_assign_single_p (stmt
))
1078 lhs
= gimple_assign_lhs (stmt
);
1079 rhs
= gimple_assign_rhs1 (stmt
);
1081 if (TREE_CODE (lhs
) != COMPONENT_REF
1082 || TREE_OPERAND (lhs
, 0) != arg
)
1085 fld
= TREE_OPERAND (lhs
, 1);
1086 if (!method
&& fld
== method_field
)
1088 rhs
= get_ssa_def_if_simple_copy (rhs
);
1089 if (TREE_CODE (rhs
) == ADDR_EXPR
1090 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == FUNCTION_DECL
1091 && TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs
, 0))) == METHOD_TYPE
)
1093 method
= TREE_OPERAND (rhs
, 0);
1096 fill_member_ptr_cst_jump_function (jfunc
, rhs
, delta
);
1104 if (!delta
&& fld
== delta_field
)
1106 rhs
= get_ssa_def_if_simple_copy (rhs
);
1107 if (TREE_CODE (rhs
) == INTEGER_CST
)
1112 fill_member_ptr_cst_jump_function (jfunc
, rhs
, delta
);
1124 /* Go through the arguments of the CALL and for every member pointer within
1125 tries determine whether it is a constant. If it is, create a corresponding
1126 constant jump function in FUNCTIONS which is an array of jump functions
1127 associated with the call. */
1130 compute_cst_member_ptr_arguments (struct ipa_edge_args
*args
,
1134 tree arg
, method_field
, delta_field
;
1136 for (num
= 0; num
< gimple_call_num_args (call
); num
++)
1138 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, num
);
1139 arg
= gimple_call_arg (call
, num
);
1141 if (jfunc
->type
== IPA_JF_UNKNOWN
1142 && type_like_member_ptr_p (TREE_TYPE (arg
), &method_field
,
1144 determine_cst_member_ptr (call
, arg
, method_field
, delta_field
, jfunc
);
1148 /* Compute jump function for all arguments of callsite CS and insert the
1149 information in the jump_functions array in the ipa_edge_args corresponding
1150 to this callsite. */
1153 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1154 struct cgraph_edge
*cs
)
1156 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1157 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1158 gimple call
= cs
->call_stmt
;
1159 int arg_num
= gimple_call_num_args (call
);
1161 if (arg_num
== 0 || args
->jump_functions
)
1163 VEC_safe_grow_cleared (ipa_jump_func_t
, gc
, args
->jump_functions
, arg_num
);
1165 /* We will deal with constants and SSA scalars first: */
1166 compute_scalar_jump_functions (info
, parms_ainfo
, args
, call
);
1168 /* Let's check whether there are any potential member pointers and if so,
1169 whether we can determine their functions as pass_through. */
1170 if (!compute_pass_through_member_ptrs (info
, parms_ainfo
, args
, call
))
1173 /* Finally, let's check whether we actually pass a new constant member
1175 compute_cst_member_ptr_arguments (args
, call
);
1178 /* Compute jump functions for all edges - both direct and indirect - outgoing
1179 from NODE. Also count the actual arguments in the process. */
1182 ipa_compute_jump_functions (struct cgraph_node
*node
,
1183 struct param_analysis_info
*parms_ainfo
)
1185 struct cgraph_edge
*cs
;
1187 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1189 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1191 /* We do not need to bother analyzing calls to unknown
1192 functions unless they may become known during lto/whopr. */
1193 if (!callee
->analyzed
&& !flag_lto
)
1195 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1198 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1199 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1202 /* If RHS looks like a rhs of a statement loading pfn from a member
1203 pointer formal parameter, return the parameter, otherwise return
1204 NULL. If USE_DELTA, then we look for a use of the delta field
1205 rather than the pfn. */
1208 ipa_get_member_ptr_load_param (tree rhs
, bool use_delta
)
1210 tree rec
, ref_field
, ref_offset
, fld
, fld_offset
, ptr_field
, delta_field
;
1212 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1214 ref_field
= TREE_OPERAND (rhs
, 1);
1215 rhs
= TREE_OPERAND (rhs
, 0);
1218 ref_field
= NULL_TREE
;
1219 if (TREE_CODE (rhs
) != MEM_REF
)
1221 rec
= TREE_OPERAND (rhs
, 0);
1222 if (TREE_CODE (rec
) != ADDR_EXPR
)
1224 rec
= TREE_OPERAND (rec
, 0);
1225 if (TREE_CODE (rec
) != PARM_DECL
1226 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1229 ref_offset
= TREE_OPERAND (rhs
, 1);
1233 if (integer_nonzerop (ref_offset
))
1241 return ref_field
== fld
? rec
: NULL_TREE
;
1245 fld_offset
= byte_position (delta_field
);
1247 fld_offset
= byte_position (ptr_field
);
1249 return tree_int_cst_equal (ref_offset
, fld_offset
) ? rec
: NULL_TREE
;
1252 /* If STMT looks like a statement loading a value from a member pointer formal
1253 parameter, this function returns that parameter. */
1256 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
)
1260 if (!gimple_assign_single_p (stmt
))
1263 rhs
= gimple_assign_rhs1 (stmt
);
1264 return ipa_get_member_ptr_load_param (rhs
, use_delta
);
1267 /* Returns true iff T is an SSA_NAME defined by a statement. */
1270 ipa_is_ssa_with_stmt_def (tree t
)
1272 if (TREE_CODE (t
) == SSA_NAME
1273 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1279 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1280 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1281 indirect call graph edge. */
1283 static struct cgraph_edge
*
1284 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1286 struct cgraph_edge
*cs
;
1288 cs
= cgraph_edge (node
, stmt
);
1289 cs
->indirect_info
->param_index
= param_index
;
1290 cs
->indirect_info
->anc_offset
= 0;
1291 cs
->indirect_info
->polymorphic
= 0;
1295 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1296 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1297 intermediate information about each formal parameter. Currently it checks
1298 whether the call calls a pointer that is a formal parameter and if so, the
1299 parameter is marked with the called flag and an indirect call graph edge
1300 describing the call is created. This is very simple for ordinary pointers
1301 represented in SSA but not-so-nice when it comes to member pointers. The
1302 ugly part of this function does nothing more than trying to match the
1303 pattern of such a call. An example of such a pattern is the gimple dump
1304 below, the call is on the last line:
1307 f$__delta_5 = f.__delta;
1308 f$__pfn_24 = f.__pfn;
1312 f$__delta_5 = MEM[(struct *)&f];
1313 f$__pfn_24 = MEM[(struct *)&f + 4B];
1315 and a few lines below:
1318 D.2496_3 = (int) f$__pfn_24;
1319 D.2497_4 = D.2496_3 & 1;
1326 D.2500_7 = (unsigned int) f$__delta_5;
1327 D.2501_8 = &S + D.2500_7;
1328 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1329 D.2503_10 = *D.2502_9;
1330 D.2504_12 = f$__pfn_24 + -1;
1331 D.2505_13 = (unsigned int) D.2504_12;
1332 D.2506_14 = D.2503_10 + D.2505_13;
1333 D.2507_15 = *D.2506_14;
1334 iftmp.11_16 = (String:: *) D.2507_15;
1337 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1338 D.2500_19 = (unsigned int) f$__delta_5;
1339 D.2508_20 = &S + D.2500_19;
1340 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1342 Such patterns are results of simple calls to a member pointer:
1344 int doprinting (int (MyString::* f)(int) const)
1346 MyString S ("somestring");
1353 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1354 struct ipa_node_params
*info
,
1355 struct param_analysis_info
*parms_ainfo
,
1356 gimple call
, tree target
)
1361 tree rec
, rec2
, cond
;
1364 basic_block bb
, virt_bb
, join
;
1366 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1368 tree var
= SSA_NAME_VAR (target
);
1369 index
= ipa_get_param_decl_index (info
, var
);
1371 ipa_note_param_call (node
, index
, call
);
1375 /* Now we need to try to match the complex pattern of calling a member
1378 if (!POINTER_TYPE_P (TREE_TYPE (target
))
1379 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1382 def
= SSA_NAME_DEF_STMT (target
);
1383 if (gimple_code (def
) != GIMPLE_PHI
)
1386 if (gimple_phi_num_args (def
) != 2)
1389 /* First, we need to check whether one of these is a load from a member
1390 pointer that is a parameter to this function. */
1391 n1
= PHI_ARG_DEF (def
, 0);
1392 n2
= PHI_ARG_DEF (def
, 1);
1393 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1395 d1
= SSA_NAME_DEF_STMT (n1
);
1396 d2
= SSA_NAME_DEF_STMT (n2
);
1398 join
= gimple_bb (def
);
1399 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false)))
1401 if (ipa_get_stmt_member_ptr_load_param (d2
, false))
1404 bb
= EDGE_PRED (join
, 0)->src
;
1405 virt_bb
= gimple_bb (d2
);
1407 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false)))
1409 bb
= EDGE_PRED (join
, 1)->src
;
1410 virt_bb
= gimple_bb (d1
);
1415 /* Second, we need to check that the basic blocks are laid out in the way
1416 corresponding to the pattern. */
1418 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1419 || single_pred (virt_bb
) != bb
1420 || single_succ (virt_bb
) != join
)
1423 /* Third, let's see that the branching is done depending on the least
1424 significant bit of the pfn. */
1426 branch
= last_stmt (bb
);
1427 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1430 if ((gimple_cond_code (branch
) != NE_EXPR
1431 && gimple_cond_code (branch
) != EQ_EXPR
)
1432 || !integer_zerop (gimple_cond_rhs (branch
)))
1435 cond
= gimple_cond_lhs (branch
);
1436 if (!ipa_is_ssa_with_stmt_def (cond
))
1439 def
= SSA_NAME_DEF_STMT (cond
);
1440 if (!is_gimple_assign (def
)
1441 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1442 || !integer_onep (gimple_assign_rhs2 (def
)))
1445 cond
= gimple_assign_rhs1 (def
);
1446 if (!ipa_is_ssa_with_stmt_def (cond
))
1449 def
= SSA_NAME_DEF_STMT (cond
);
1451 if (is_gimple_assign (def
)
1452 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1454 cond
= gimple_assign_rhs1 (def
);
1455 if (!ipa_is_ssa_with_stmt_def (cond
))
1457 def
= SSA_NAME_DEF_STMT (cond
);
1460 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1461 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1462 == ptrmemfunc_vbit_in_delta
));
1467 index
= ipa_get_param_decl_index (info
, rec
);
1468 if (index
>= 0 && !is_parm_modified_before_stmt (&parms_ainfo
[index
],
1470 ipa_note_param_call (node
, index
, call
);
1475 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1476 object referenced in the expression is a formal parameter of the caller
1477 (described by INFO), create a call note for the statement. */
1480 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1481 struct ipa_node_params
*info
, gimple call
,
1484 struct cgraph_edge
*cs
;
1485 struct cgraph_indirect_call_info
*ii
;
1486 struct ipa_jump_func jfunc
;
1487 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1489 HOST_WIDE_INT anc_offset
;
1491 if (!flag_devirtualize
)
1494 if (TREE_CODE (obj
) != SSA_NAME
)
1497 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1499 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1503 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1504 gcc_assert (index
>= 0);
1505 if (detect_type_change_ssa (obj
, call
, &jfunc
))
1510 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1513 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
1516 index
= ipa_get_param_decl_index (info
,
1517 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
1518 gcc_assert (index
>= 0);
1519 if (detect_type_change (obj
, expr
, call
, &jfunc
, anc_offset
))
1523 cs
= ipa_note_param_call (node
, index
, call
);
1524 ii
= cs
->indirect_info
;
1525 ii
->anc_offset
= anc_offset
;
1526 ii
->otr_token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (target
), 1);
1527 ii
->otr_type
= TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target
)));
1528 ii
->polymorphic
= 1;
1531 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1532 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1533 containing intermediate information about each formal parameter. */
1536 ipa_analyze_call_uses (struct cgraph_node
*node
,
1537 struct ipa_node_params
*info
,
1538 struct param_analysis_info
*parms_ainfo
, gimple call
)
1540 tree target
= gimple_call_fn (call
);
1544 if (TREE_CODE (target
) == SSA_NAME
)
1545 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
1546 else if (TREE_CODE (target
) == OBJ_TYPE_REF
)
1547 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
1551 /* Analyze the call statement STMT with respect to formal parameters (described
1552 in INFO) of caller given by NODE. Currently it only checks whether formal
1553 parameters are called. PARMS_AINFO is a pointer to a vector containing
1554 intermediate information about each formal parameter. */
1557 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
1558 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
1560 if (is_gimple_call (stmt
))
1561 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
1564 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1565 If OP is a parameter declaration, mark it as used in the info structure
1569 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED
,
1570 tree op
, void *data
)
1572 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
1574 op
= get_base_address (op
);
1576 && TREE_CODE (op
) == PARM_DECL
)
1578 int index
= ipa_get_param_decl_index (info
, op
);
1579 gcc_assert (index
>= 0);
1580 ipa_set_param_used (info
, index
, true);
1586 /* Scan the function body of NODE and inspect the uses of formal parameters.
1587 Store the findings in various structures of the associated ipa_node_params
1588 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1589 vector containing intermediate information about each formal parameter. */
1592 ipa_analyze_params_uses (struct cgraph_node
*node
,
1593 struct param_analysis_info
*parms_ainfo
)
1595 tree decl
= node
->decl
;
1597 struct function
*func
;
1598 gimple_stmt_iterator gsi
;
1599 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1602 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
1605 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
1607 tree parm
= ipa_get_param (info
, i
);
1608 /* For SSA regs see if parameter is used. For non-SSA we compute
1609 the flag during modification analysis. */
1610 if (is_gimple_reg (parm
)
1611 && gimple_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
))
1612 ipa_set_param_used (info
, i
, true);
1615 func
= DECL_STRUCT_FUNCTION (decl
);
1616 FOR_EACH_BB_FN (bb
, func
)
1618 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1620 gimple stmt
= gsi_stmt (gsi
);
1622 if (is_gimple_debug (stmt
))
1625 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
1626 walk_stmt_load_store_addr_ops (stmt
, info
,
1627 visit_ref_for_mod_analysis
,
1628 visit_ref_for_mod_analysis
,
1629 visit_ref_for_mod_analysis
);
1631 for (gsi
= gsi_start (phi_nodes (bb
)); !gsi_end_p (gsi
); gsi_next (&gsi
))
1632 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
1633 visit_ref_for_mod_analysis
,
1634 visit_ref_for_mod_analysis
,
1635 visit_ref_for_mod_analysis
);
1638 info
->uses_analysis_done
= 1;
1641 /* Initialize the array describing properties of of formal parameters
1642 of NODE, analyze their uses and compute jump functions associated
1643 with actual arguments of calls from within NODE. */
1646 ipa_analyze_node (struct cgraph_node
*node
)
1648 struct ipa_node_params
*info
;
1649 struct param_analysis_info
*parms_ainfo
;
1652 ipa_check_create_node_params ();
1653 ipa_check_create_edge_args ();
1654 info
= IPA_NODE_REF (node
);
1655 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
1656 current_function_decl
= node
->decl
;
1657 ipa_initialize_node_params (node
);
1659 param_count
= ipa_get_param_count (info
);
1660 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
1661 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
1663 ipa_analyze_params_uses (node
, parms_ainfo
);
1664 ipa_compute_jump_functions (node
, parms_ainfo
);
1666 for (i
= 0; i
< param_count
; i
++)
1667 if (parms_ainfo
[i
].visited_statements
)
1668 BITMAP_FREE (parms_ainfo
[i
].visited_statements
);
1670 current_function_decl
= NULL
;
1675 /* Update the jump function DST when the call graph edge corresponding to SRC is
1676 is being inlined, knowing that DST is of type ancestor and src of known
1680 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
1681 struct ipa_jump_func
*dst
)
1683 HOST_WIDE_INT combined_offset
;
1686 combined_offset
= src
->value
.known_type
.offset
+ dst
->value
.ancestor
.offset
;
1687 combined_type
= dst
->value
.ancestor
.type
;
1689 dst
->type
= IPA_JF_KNOWN_TYPE
;
1690 dst
->value
.known_type
.base_type
= src
->value
.known_type
.base_type
;
1691 dst
->value
.known_type
.offset
= combined_offset
;
1692 dst
->value
.known_type
.component_type
= combined_type
;
1695 /* Update the jump functions associated with call graph edge E when the call
1696 graph edge CS is being inlined, assuming that E->caller is already (possibly
1697 indirectly) inlined into CS->callee and that E has not been inlined. */
1700 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
1701 struct cgraph_edge
*e
)
1703 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
1704 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
1705 int count
= ipa_get_cs_argument_count (args
);
1708 for (i
= 0; i
< count
; i
++)
1710 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
1712 if (dst
->type
== IPA_JF_ANCESTOR
)
1714 struct ipa_jump_func
*src
;
1716 /* Variable number of arguments can cause havoc if we try to access
1717 one that does not exist in the inlined edge. So make sure we
1719 if (dst
->value
.ancestor
.formal_id
>= ipa_get_cs_argument_count (top
))
1721 dst
->type
= IPA_JF_UNKNOWN
;
1725 src
= ipa_get_ith_jump_func (top
, dst
->value
.ancestor
.formal_id
);
1726 if (src
->type
== IPA_JF_KNOWN_TYPE
)
1727 combine_known_type_and_ancestor_jfs (src
, dst
);
1728 else if (src
->type
== IPA_JF_PASS_THROUGH
1729 && src
->value
.pass_through
.operation
== NOP_EXPR
)
1730 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
1731 else if (src
->type
== IPA_JF_ANCESTOR
)
1733 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
1734 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
1737 dst
->type
= IPA_JF_UNKNOWN
;
1739 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
1741 struct ipa_jump_func
*src
;
1742 /* We must check range due to calls with variable number of arguments
1743 and we cannot combine jump functions with operations. */
1744 if (dst
->value
.pass_through
.operation
== NOP_EXPR
1745 && (dst
->value
.pass_through
.formal_id
1746 < ipa_get_cs_argument_count (top
)))
1748 src
= ipa_get_ith_jump_func (top
,
1749 dst
->value
.pass_through
.formal_id
);
1753 dst
->type
= IPA_JF_UNKNOWN
;
1758 /* If TARGET is an addr_expr of a function declaration, make it the destination
1759 of an indirect edge IE and return the edge. Otherwise, return NULL. */
1761 struct cgraph_edge
*
1762 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
1764 struct cgraph_node
*callee
;
1766 if (TREE_CODE (target
) == ADDR_EXPR
)
1767 target
= TREE_OPERAND (target
, 0);
1768 if (TREE_CODE (target
) != FUNCTION_DECL
)
1770 callee
= cgraph_get_node (target
);
1773 ipa_check_create_node_params ();
1775 /* We can not make edges to inline clones. It is bug that someone removed
1776 the cgraph node too early. */
1777 gcc_assert (!callee
->global
.inlined_to
);
1779 cgraph_make_edge_direct (ie
, callee
);
1782 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
1783 "(%s/%i -> %s/%i), for stmt ",
1784 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
1785 cgraph_node_name (ie
->caller
), ie
->caller
->uid
,
1786 cgraph_node_name (ie
->callee
), ie
->callee
->uid
);
1788 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
1790 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
1792 callee
= cgraph_function_or_thunk_node (callee
, NULL
);
1797 /* Try to find a destination for indirect edge IE that corresponds to a simple
1798 call or a call of a member function pointer and where the destination is a
1799 pointer formal parameter described by jump function JFUNC. If it can be
1800 determined, return the newly direct edge, otherwise return NULL. */
1802 static struct cgraph_edge
*
1803 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
1804 struct ipa_jump_func
*jfunc
)
1808 if (jfunc
->type
== IPA_JF_CONST
)
1809 target
= jfunc
->value
.constant
;
1810 else if (jfunc
->type
== IPA_JF_CONST_MEMBER_PTR
)
1811 target
= jfunc
->value
.member_cst
.pfn
;
1815 return ipa_make_edge_direct_to_target (ie
, target
);
1818 /* Try to find a destination for indirect edge IE that corresponds to a
1819 virtual call based on a formal parameter which is described by jump
1820 function JFUNC and if it can be determined, make it direct and return the
1821 direct edge. Otherwise, return NULL. */
1823 static struct cgraph_edge
*
1824 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
1825 struct ipa_jump_func
*jfunc
)
1829 if (jfunc
->type
!= IPA_JF_KNOWN_TYPE
)
1832 binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
1833 gcc_checking_assert (binfo
);
1834 binfo
= get_binfo_at_offset (binfo
, jfunc
->value
.known_type
.offset
1835 + ie
->indirect_info
->anc_offset
,
1836 ie
->indirect_info
->otr_type
);
1838 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
1844 return ipa_make_edge_direct_to_target (ie
, target
);
1849 /* Update the param called notes associated with NODE when CS is being inlined,
1850 assuming NODE is (potentially indirectly) inlined into CS->callee.
1851 Moreover, if the callee is discovered to be constant, create a new cgraph
1852 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
1853 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
1856 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
1857 struct cgraph_node
*node
,
1858 VEC (cgraph_edge_p
, heap
) **new_edges
)
1860 struct ipa_edge_args
*top
;
1861 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
1864 ipa_check_create_edge_args ();
1865 top
= IPA_EDGE_REF (cs
);
1867 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
1869 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
1870 struct ipa_jump_func
*jfunc
;
1872 next_ie
= ie
->next_callee
;
1874 if (ici
->param_index
== -1)
1877 /* We must check range due to calls with variable number of arguments: */
1878 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
1880 ici
->param_index
= -1;
1884 jfunc
= ipa_get_ith_jump_func (top
, ici
->param_index
);
1885 if (jfunc
->type
== IPA_JF_PASS_THROUGH
1886 && jfunc
->value
.pass_through
.operation
== NOP_EXPR
)
1887 ici
->param_index
= jfunc
->value
.pass_through
.formal_id
;
1888 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
1890 ici
->param_index
= jfunc
->value
.ancestor
.formal_id
;
1891 ici
->anc_offset
+= jfunc
->value
.ancestor
.offset
;
1894 /* Either we can find a destination for this edge now or never. */
1895 ici
->param_index
= -1;
1897 if (!flag_indirect_inlining
)
1900 if (ici
->polymorphic
)
1901 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
);
1903 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
);
1905 if (new_direct_edge
)
1907 new_direct_edge
->indirect_inlining_edge
= 1;
1908 if (new_direct_edge
->call_stmt
1909 && !gimple_check_call_matching_types (new_direct_edge
->call_stmt
,
1910 new_direct_edge
->callee
->decl
))
1912 gimple_call_set_cannot_inline (new_direct_edge
->call_stmt
, true);
1913 new_direct_edge
->call_stmt_cannot_inline_p
= true;
1917 VEC_safe_push (cgraph_edge_p
, heap
, *new_edges
,
1919 top
= IPA_EDGE_REF (cs
);
1928 /* Recursively traverse subtree of NODE (including node) made of inlined
1929 cgraph_edges when CS has been inlined and invoke
1930 update_indirect_edges_after_inlining on all nodes and
1931 update_jump_functions_after_inlining on all non-inlined edges that lead out
1932 of this subtree. Newly discovered indirect edges will be added to
1933 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
1937 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
1938 struct cgraph_node
*node
,
1939 VEC (cgraph_edge_p
, heap
) **new_edges
)
1941 struct cgraph_edge
*e
;
1944 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
1946 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1947 if (!e
->inline_failed
)
1948 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
1950 update_jump_functions_after_inlining (cs
, e
);
1951 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
1952 update_jump_functions_after_inlining (cs
, e
);
1957 /* Update jump functions and call note functions on inlining the call site CS.
1958 CS is expected to lead to a node already cloned by
1959 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
1960 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
1964 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
1965 VEC (cgraph_edge_p
, heap
) **new_edges
)
1968 /* Do nothing if the preparation phase has not been carried out yet
1969 (i.e. during early inlining). */
1970 if (!ipa_node_params_vector
)
1972 gcc_assert (ipa_edge_args_vector
);
1974 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
1976 /* We do not keep jump functions of inlined edges up to date. Better to free
1977 them so we do not access them accidentally. */
1978 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
1982 /* Frees all dynamically allocated structures that the argument info points
1986 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
1988 if (args
->jump_functions
)
1989 ggc_free (args
->jump_functions
);
1991 memset (args
, 0, sizeof (*args
));
1994 /* Free all ipa_edge structures. */
1997 ipa_free_all_edge_args (void)
2000 struct ipa_edge_args
*args
;
2002 FOR_EACH_VEC_ELT (ipa_edge_args_t
, ipa_edge_args_vector
, i
, args
)
2003 ipa_free_edge_args_substructures (args
);
2005 VEC_free (ipa_edge_args_t
, gc
, ipa_edge_args_vector
);
2006 ipa_edge_args_vector
= NULL
;
2009 /* Frees all dynamically allocated structures that the param info points
2013 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
2015 VEC_free (ipa_param_descriptor_t
, heap
, info
->descriptors
);
2016 free (info
->lattices
);
2017 /* Lattice values and their sources are deallocated with their alocation
2019 VEC_free (tree
, heap
, info
->known_vals
);
2020 memset (info
, 0, sizeof (*info
));
2023 /* Free all ipa_node_params structures. */
2026 ipa_free_all_node_params (void)
2029 struct ipa_node_params
*info
;
2031 FOR_EACH_VEC_ELT (ipa_node_params_t
, ipa_node_params_vector
, i
, info
)
2032 ipa_free_node_params_substructures (info
);
2034 VEC_free (ipa_node_params_t
, heap
, ipa_node_params_vector
);
2035 ipa_node_params_vector
= NULL
;
2038 /* Hook that is called by cgraph.c when an edge is removed. */
2041 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
2043 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2044 if (VEC_length (ipa_edge_args_t
, ipa_edge_args_vector
)
2045 <= (unsigned)cs
->uid
)
2047 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
2050 /* Hook that is called by cgraph.c when a node is removed. */
2053 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
2055 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2056 if (VEC_length (ipa_node_params_t
, ipa_node_params_vector
)
2057 <= (unsigned)node
->uid
)
2059 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
                           __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
                                       old_args->jump_functions);
}

/* Hook that is called by cgraph.c when a node is duplicated.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
                           ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
                                    old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->uses_analysis_done = old_info->uses_analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
}

/* Analyze a function that has been newly added to the callgraph.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  ipa_analyze_node (node);
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!flag_indirect_inlining)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      ipa_unregister_cgraph_hooks ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
}

/* Print the parameter descriptors of the function corresponding to NODE to
   file F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  tree temp;
  struct ipa_node_params *info;

  if (!node->analyzed)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s parameter descriptors:\n",
           cgraph_node_name (node));
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      temp = ipa_get_param (info, i);
      if (TREE_CODE (temp) == PARM_DECL)
        fprintf (f, " param %d : %s", i,
                 (DECL_NAME (temp)
                  ? (*lang_hooks.decl_printable_name) (temp, 2)
                  : "(unnamed)"));
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      fprintf (f, "\n");
    }
}

/* Print the parameter descriptors of all functions in the callgraph to
   file F.  */

void
ipa_print_all_params (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  for (node = cgraph_nodes; node; node = node->next)
    ipa_print_node_params (f, node);
}

/* Return a heap allocated vector containing formal parameters of FNDECL.  */

VEC(tree, heap) *
ipa_get_vector_of_formal_parms (tree fndecl)
{
  VEC(tree, heap) *args;
  int count;
  tree parm;

  count = count_formal_params (fndecl);
  args = VEC_alloc (tree, heap, count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    VEC_quick_push (tree, args, parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

static inline VEC(tree, heap) *
get_vector_of_formal_parm_types (tree fntype)
{
  VEC(tree, heap) *types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types = VEC_alloc (tree, heap, count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    VEC_quick_push (tree, types, TREE_VALUE (t));

  return types;
}

/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified, which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
                              const char *synth_parm_prefix)
{
  VEC(tree, heap) *oparms, *otypes;
  tree orig_type, new_type = NULL;
  tree old_arg_types, t, new_arg_types = NULL;
  tree parm, *link = &DECL_ARGUMENTS (fndecl);
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  tree new_reversed = NULL;
  bool care_for_types, last_parm_void;

  if (!synth_parm_prefix)
    synth_parm_prefix = "SYNTH";

  oparms = ipa_get_vector_of_formal_parms (fndecl);
  orig_type = TREE_TYPE (fndecl);
  old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
  care_for_types = (old_arg_types != NULL_TREE);
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
                        == void_type_node);
      otypes = get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
        gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
      else
        gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
    }
  else
    {
      last_parm_void = false;
      otypes = NULL;
    }

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
      parm = VEC_index (tree, oparms, adj->base_index);
      adj->base = parm;

      if (adj->copy_param)
        {
          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
                                                             adj->base_index),
                                       new_arg_types);
          *link = parm;
          link = &DECL_CHAIN (parm);
        }
      else if (!adj->remove_param)
        {
          tree new_parm;
          tree ptype;

          if (adj->by_ref)
            ptype = build_pointer_type (adj->type);
          else
            ptype = adj->type;

          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

          new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
                                 ptype);
          DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);

          DECL_ARTIFICIAL (new_parm) = 1;
          DECL_ARG_TYPE (new_parm) = ptype;
          DECL_CONTEXT (new_parm) = fndecl;
          TREE_USED (new_parm) = 1;
          DECL_IGNORED_P (new_parm) = 1;
          layout_decl (new_parm, 0);

          add_referenced_var (new_parm);
          mark_sym_for_renaming (new_parm);

          adj->reduction = new_parm;

          *link = new_parm;
          link = &DECL_CHAIN (new_parm);
        }
    }

  *link = NULL_TREE;

  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
        {
          if (new_reversed)
            TREE_CHAIN (new_arg_types) = void_list_node;
          else
            new_reversed = void_list_node;
        }
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is that METHOD_TYPEs must have a THIS argument; when we are
     asked to remove it, we need to build a new FUNCTION_TYPE instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (VEC_index (ipa_parm_adjustment_t, adjustments, 0)->copy_param
          && VEC_index (ipa_parm_adjustment_t, adjustments, 0)->base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  if (otypes)
    VEC_free (tree, heap, otypes);
  VEC_free (tree, heap, oparms);
}

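/* As a purely hypothetical illustration (the declaration and field names
   below are made up and do not come from any testcase), for an original
   declaration

       void foo (int a, struct big *b, int unused);

   a plan that copies A, reduces *B to the scalar member at offset zero and
   drops UNUSED would contain a copy_param entry with base_index 0, a
   non-copy entry with base_index 1, offset 0 and a scalar type (whose
   reduction field receives the newly built "SYNTH" PARM_DECL), and a
   remove_param entry with base_index 2, yielding roughly
   "void foo (int a, int SYNTH.n);".  */
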
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
                           ipa_parm_adjustment_vec adjustments)
{
  VEC(tree, heap) *vargs;
  VEC(tree, gc) **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi;
  tree callee_decl;
  int i, len;

  len = VEC_length (ipa_parm_adjustment_t, adjustments);
  vargs = VEC_alloc (tree, heap, len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;

  gsi = gsi_for_stmt (stmt);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->copy_param)
        {
          tree arg = gimple_call_arg (stmt, adj->base_index);

          VEC_quick_push (tree, vargs, arg);
        }
      else if (!adj->remove_param)
        {
          tree expr, base, off;
          location_t loc;

          /* We create a new parameter out of the value of the old one, we can
             do the following kind of transformations:

             - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

             - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by the value of adj->by_ref.  Moreover, the code below
               handles both situations when the original aggregate is passed by
               value (its type is not a pointer) and when it is passed by
               reference (it is a pointer to an aggregate).

             When the new argument is passed by reference (adj->by_ref is true)
             it must be a part of an aggregate and therefore we form it by
             simply taking the address of a reference inside the original
             aggregate.  */

          gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
          base = gimple_call_arg (stmt, adj->base_index);
          loc = EXPR_LOCATION (base);

          if (TREE_CODE (base) != ADDR_EXPR
              && POINTER_TYPE_P (TREE_TYPE (base)))
            off = build_int_cst (adj->alias_ptr_type,
                                 adj->offset / BITS_PER_UNIT);
          else
            {
              HOST_WIDE_INT base_offset;
              tree prev_base;

              if (TREE_CODE (base) == ADDR_EXPR)
                base = TREE_OPERAND (base, 0);
              prev_base = base;
              base = get_addr_base_and_unit_offset (base, &base_offset);
              /* Aggregate arguments can have non-invariant addresses.  */
              if (!base)
                {
                  base = build_fold_addr_expr (prev_base);
                  off = build_int_cst (adj->alias_ptr_type,
                                       adj->offset / BITS_PER_UNIT);
                }
              else if (TREE_CODE (base) == MEM_REF)
                {
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
                                         off);
                  base = TREE_OPERAND (base, 0);
                }
              else
                {
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  base = build_fold_addr_expr (base);
                }
            }

          expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
          if (adj->by_ref)
            expr = build_fold_addr_expr (expr);

          expr = force_gimple_operand_gsi (&gsi, expr,
                                           adj->by_ref
                                           || is_gimple_reg_type (adj->type),
                                           NULL, true, GSI_SAME_STMT);
          VEC_quick_push (tree, vargs, expr);
        }
      if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
        {
          unsigned int ix;
          tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
          gimple def_temp;

          arg = gimple_call_arg (stmt, adj->base_index);
          if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
            {
              if (!fold_convertible_p (TREE_TYPE (origin), arg))
                continue;
              arg = fold_convert_loc (gimple_location (stmt),
                                      TREE_TYPE (origin), arg);
            }
          if (debug_args == NULL)
            debug_args = decl_debug_args_insert (callee_decl);
          for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
            if (ddecl == origin)
              {
                ddecl = VEC_index (tree, *debug_args, ix + 1);
                break;
              }
          if (ddecl == NULL)
            {
              ddecl = make_node (DEBUG_EXPR_DECL);
              DECL_ARTIFICIAL (ddecl) = 1;
              TREE_TYPE (ddecl) = TREE_TYPE (origin);
              DECL_MODE (ddecl) = DECL_MODE (origin);

              VEC_safe_push (tree, gc, *debug_args, origin);
              VEC_safe_push (tree, gc, *debug_args, ddecl);
            }
          def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
                                              stmt);
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_call_cannot_inline_p (stmt))
    gimple_call_set_cannot_inline
      (new_stmt, !gimple_check_call_matching_types (new_stmt, callee_decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  update_ssa (TODO_update_ssa);
  free_dominance_info (CDI_DOMINATORS);
}

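/* Continuing the hypothetical example above: a call "foo (i, &s, 7);" would
   be rebuilt so that the copied argument I is pushed unchanged, a load such
   as "_n = MEM[(int *)&s];" is emitted before the call by
   force_gimple_operand_gsi to feed the reduced argument, and the removed
   argument 7 simply does not appear in the new call "foo (i, _n);".  */
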
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
                                       ipa_parm_adjustment_vec adjustments)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (adj->base_index == base_index)
        {
          if (one)
            return true;
          else
            one = true;
        }
    }
  return false;
}

/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
                         ipa_parm_adjustment_vec outer)
{
  int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
  int inlen = VEC_length (ipa_parm_adjustment_t, inner);
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = VEC_index (ipa_parm_adjustment_t, inner, i);

      if (n->remove_param)
        removals++;
      else
        VEC_quick_push (ipa_parm_adjustment_t, tmp, n);
    }

  adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment *r;
      struct ipa_parm_adjustment *out = VEC_index (ipa_parm_adjustment_t,
                                                   outer, i);
      struct ipa_parm_adjustment *in = VEC_index (ipa_parm_adjustment_t, tmp,
                                                  out->base_index);

      gcc_assert (!in->remove_param);
      if (out->remove_param)
        {
          if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
            {
              r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
              memset (r, 0, sizeof (*r));
              r->remove_param = true;
            }
          continue;
        }

      r = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
      memset (r, 0, sizeof (*r));
      r->base_index = in->base_index;
      r->type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->copy_param && out->copy_param)
        r->copy_param = true;
      else if (in->copy_param)
        r->offset = out->offset;
      else if (out->copy_param)
        r->offset = in->offset;
      else
        r->offset = in->offset + out->offset;
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = VEC_index (ipa_parm_adjustment_t,
                                                 inner, i);

      if (n->remove_param)
        VEC_quick_push (ipa_parm_adjustment_t, adjustments, n);
    }

  VEC_free (ipa_parm_adjustment_t, heap, tmp);
  return adjustments;
}

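/* A small hypothetical example: if INNER rewrote (a, b, c) as (a, b) by
   removing C, and OUTER then rewrote (a, b) as (b) by removing the first of
   the remaining parameters, the combined vector describes (a, b, c) -> (b);
   the OUTER removal is translated through TMP so that it refers back to A,
   B remains a plain copy, and the INNER removal of C is re-appended by the
   final loop.  */
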
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
                            tree fndecl)
{
  int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
  bool first = true;
  VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);

      if (!first)
        fprintf (file, "                 ");
      else
        first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
      if (adj->base)
        {
          fprintf (file, ", base: ");
          print_generic_expr (file, adj->base, 0);
        }
      if (adj->reduction)
        {
          fprintf (file, ", reduction: ");
          print_generic_expr (file, adj->reduction, 0);
        }
      if (adj->new_ssa_base)
        {
          fprintf (file, ", new_ssa_base: ");
          print_generic_expr (file, adj->new_ssa_base, 0);
        }

      if (adj->copy_param)
        fprintf (file, ", copy_param");
      else if (adj->remove_param)
        fprintf (file, ", remove_param");
      else
        fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
        fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  VEC_free (tree, heap, parms);
}

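/* With the hypothetical adjustments used in the examples above, the output
   would look roughly like

       IPA param adjustments: 0. base_index: 0 - a, copy_param, type: ...
                              1. base_index: 1 - b, offset 0, type: ...

   with one line per adjustment entry.  */
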
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  streamer_write_uhwi (ob, jump_func->type);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      stream_write_tree (ob, jump_func->value.pass_through.operand, true);
      streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      stream_write_tree (ob, jump_func->value.member_cst.pfn, true);
      stream_write_tree (ob, jump_func->value.member_cst.delta, false);
      break;
    }
}

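/* For instance, a pass-through jump function is emitted as the uhwi
   IPA_JF_PASS_THROUGH followed by the operand tree, the formal_id uhwi and
   the operation uhwi; ipa_read_jump_function below consumes the fields in
   exactly the same order, so the two functions must be kept in sync.  */
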
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct data_in *data_in)
{
  jump_func->type = (enum jump_func_type) streamer_read_uhwi (ib);

  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      jump_func->value.known_type.offset = streamer_read_uhwi (ib);
      jump_func->value.known_type.base_type = stream_read_tree (ib, data_in);
      jump_func->value.known_type.component_type = stream_read_tree (ib,
                                                                     data_in);
      break;
    case IPA_JF_CONST:
      jump_func->value.constant = stream_read_tree (ib, data_in);
      break;
    case IPA_JF_PASS_THROUGH:
      jump_func->value.pass_through.operand = stream_read_tree (ib, data_in);
      jump_func->value.pass_through.formal_id = streamer_read_uhwi (ib);
      jump_func->value.pass_through.operation
        = (enum tree_code) streamer_read_uhwi (ib);
      break;
    case IPA_JF_ANCESTOR:
      jump_func->value.ancestor.offset = streamer_read_uhwi (ib);
      jump_func->value.ancestor.type = stream_read_tree (ib, data_in);
      jump_func->value.ancestor.formal_id = streamer_read_uhwi (ib);
      break;
    case IPA_JF_CONST_MEMBER_PTR:
      jump_func->value.member_cst.pfn = stream_read_tree (ib, data_in);
      jump_func->value.member_cst.delta = stream_read_tree (ib, data_in);
      break;
    }
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->anc_offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  streamer_write_bitpack (&bp);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in ATTRIBUTE_UNUSED,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->anc_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_cgraph_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->cgraph_node_encoder;
  node_ref = lto_cgraph_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->uses_analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}

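/* The per-node record thus consists of the node reference, a bitpack with
   one "used" bit per formal parameter, and then, for each outgoing edge
   (direct callees first, indirect calls second), the argument count followed
   by that many jump functions; indirect edges additionally carry the data
   written by ipa_write_indirect_edge_info.  ipa_read_node_info below reads
   the record back in the same order.  */
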
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
        continue;
      VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
        {
          VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
                                 count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
                                    data_in);
        }
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (cgraph_node_set set)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  if (!ipa_node_params_vector)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  ob->cgraph_node = NULL;
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

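/* The LTO_section_jump_functions section therefore starts with the number of
   nodes that have parameter information, followed by one ipa_write_node_info
   record per node and a terminating zero byte on the main stream;
   ipa_prop_read_section below undoes this encoding.  */
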
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int32_t cfg_offset = sizeof (struct lto_function_header);
  const int32_t main_offset = cfg_offset + header->cfg_size;
  const int32_t string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, NULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_cgraph_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->cgraph_node_encoder;
      node = lto_cgraph_encoder_deref (encoder, index);
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_jump_functions,
                                               NULL, &len);

      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  struct cgraph_node *node;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  for (node = cgraph_nodes; node; node = node->next)
    ipa_initialize_node_params (node);