/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
24 #include "langhooks.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
47 bool parm_modified
, ref_modified
, pt_modified
;
48 bitmap parm_visited_statements
, pt_visited_statements
;
51 /* Vector where the parameter infos are actually stored. */
52 vec
<ipa_node_params_t
> ipa_node_params_vector
;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
55 /* Vector where the parameter infos are actually stored. */
56 vec
<ipa_edge_args_t
, va_gc
> *ipa_edge_args_vector
;
/* Holders of ipa cgraph hooks, registered so the IPA data above can be
   kept in sync when the call graph is mutated: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
65 /* Description of a reference to an IPA constant. */
66 struct ipa_cst_ref_desc
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge
*cs
;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc
*next_duplicate
;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
77 /* Allocation pool for reference descriptions. */
79 static alloc_pool ipa_refdesc_pool
;
81 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
85 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
87 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->symbol
.decl
);
88 struct cl_optimization
*os
;
92 os
= TREE_OPTIMIZATION (fs_opts
);
93 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
96 /* Return index of the formal whose tree is PTREE in function which corresponds
100 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor_t
> descriptors
, tree ptree
)
104 count
= descriptors
.length ();
105 for (i
= 0; i
< count
; i
++)
106 if (descriptors
[i
].decl
== ptree
)
112 /* Return index of the formal whose tree is PTREE in function which corresponds
116 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
118 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
121 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
125 ipa_populate_param_decls (struct cgraph_node
*node
,
126 vec
<ipa_param_descriptor_t
> &descriptors
)
133 fndecl
= node
->symbol
.decl
;
134 gcc_assert (gimple_has_body_p (fndecl
));
135 fnargs
= DECL_ARGUMENTS (fndecl
);
137 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
139 descriptors
[param_num
].decl
= parm
;
140 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
145 /* Return how many formal parameters FNDECL has. */
148 count_formal_params (tree fndecl
)
152 gcc_assert (gimple_has_body_p (fndecl
));
154 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
160 /* Return the declaration of Ith formal parameter of the function corresponding
161 to INFO. Note there is no setter function as this array is built just once
162 using ipa_initialize_node_params. */
165 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
167 fprintf (file
, "param #%i", i
);
168 if (info
->descriptors
[i
].decl
)
171 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
175 /* Initialize the ipa_node_params structure associated with NODE
176 to hold PARAM_COUNT parameters. */
179 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
181 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
183 if (!info
->descriptors
.exists () && param_count
)
184 info
->descriptors
.safe_grow_cleared (param_count
);
187 /* Initialize the ipa_node_params structure associated with NODE by counting
188 the function parameters, creating the descriptors and populating their
192 ipa_initialize_node_params (struct cgraph_node
*node
)
194 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
196 if (!info
->descriptors
.exists ())
198 ipa_alloc_node_params (node
, count_formal_params (node
->symbol
.decl
));
199 ipa_populate_param_decls (node
, info
->descriptors
);
203 /* Print the jump functions associated with call graph edge CS to file F. */
206 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
210 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
211 for (i
= 0; i
< count
; i
++)
213 struct ipa_jump_func
*jump_func
;
214 enum jump_func_type type
;
216 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
217 type
= jump_func
->type
;
219 fprintf (f
, " param %d: ", i
);
220 if (type
== IPA_JF_UNKNOWN
)
221 fprintf (f
, "UNKNOWN\n");
222 else if (type
== IPA_JF_KNOWN_TYPE
)
224 fprintf (f
, "KNOWN TYPE: base ");
225 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
226 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
227 jump_func
->value
.known_type
.offset
);
228 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
231 else if (type
== IPA_JF_CONST
)
233 tree val
= jump_func
->value
.constant
.value
;
234 fprintf (f
, "CONST: ");
235 print_generic_expr (f
, val
, 0);
236 if (TREE_CODE (val
) == ADDR_EXPR
237 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
240 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
245 else if (type
== IPA_JF_PASS_THROUGH
)
247 fprintf (f
, "PASS THROUGH: ");
248 fprintf (f
, "%d, op %s",
249 jump_func
->value
.pass_through
.formal_id
,
251 jump_func
->value
.pass_through
.operation
]);
252 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
255 print_generic_expr (f
,
256 jump_func
->value
.pass_through
.operand
, 0);
258 if (jump_func
->value
.pass_through
.agg_preserved
)
259 fprintf (f
, ", agg_preserved");
262 else if (type
== IPA_JF_ANCESTOR
)
264 fprintf (f
, "ANCESTOR: ");
265 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
266 jump_func
->value
.ancestor
.formal_id
,
267 jump_func
->value
.ancestor
.offset
);
268 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
269 if (jump_func
->value
.ancestor
.agg_preserved
)
270 fprintf (f
, ", agg_preserved");
274 if (jump_func
->agg
.items
)
276 struct ipa_agg_jf_item
*item
;
279 fprintf (f
, " Aggregate passed by %s:\n",
280 jump_func
->agg
.by_ref
? "reference" : "value");
281 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
283 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
285 if (TYPE_P (item
->value
))
286 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
287 tree_low_cst (TYPE_SIZE (item
->value
), 1));
290 fprintf (f
, "cst: ");
291 print_generic_expr (f
, item
->value
, 0);
300 /* Print the jump functions of all arguments on all call graph edges going from
304 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
306 struct cgraph_edge
*cs
;
308 fprintf (f
, " Jump functions of caller %s/%i:\n", cgraph_node_name (node
),
310 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
312 if (!ipa_edge_args_info_available_for_edge_p (cs
))
315 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
316 xstrdup (cgraph_node_name (node
)), node
->symbol
.order
,
317 xstrdup (cgraph_node_name (cs
->callee
)),
318 cs
->callee
->symbol
.order
);
319 ipa_print_node_jump_functions_for_edge (f
, cs
);
322 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
324 struct cgraph_indirect_call_info
*ii
;
325 if (!ipa_edge_args_info_available_for_edge_p (cs
))
328 ii
= cs
->indirect_info
;
329 if (ii
->agg_contents
)
330 fprintf (f
, " indirect %s callsite, calling param %i, "
331 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
332 ii
->member_ptr
? "member ptr" : "aggregate",
333 ii
->param_index
, ii
->offset
,
334 ii
->by_ref
? "by reference" : "by_value");
336 fprintf (f
, " indirect %s callsite, calling param %i",
337 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
);
341 fprintf (f
, ", for stmt ");
342 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
346 ipa_print_node_jump_functions_for_edge (f
, cs
);
350 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
353 ipa_print_all_jump_functions (FILE *f
)
355 struct cgraph_node
*node
;
357 fprintf (f
, "\nJump functions:\n");
358 FOR_EACH_FUNCTION (node
)
360 ipa_print_node_jump_functions (f
, node
);
364 /* Set JFUNC to be a known type jump function. */
367 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
368 tree base_type
, tree component_type
)
370 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
371 jfunc
->value
.known_type
.offset
= offset
,
372 jfunc
->value
.known_type
.base_type
= base_type
;
373 jfunc
->value
.known_type
.component_type
= component_type
;
376 /* Set JFUNC to be a constant jmp function. */
379 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
380 struct cgraph_edge
*cs
)
382 constant
= unshare_expr (constant
);
383 if (constant
&& EXPR_P (constant
))
384 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
385 jfunc
->type
= IPA_JF_CONST
;
386 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
388 if (TREE_CODE (constant
) == ADDR_EXPR
389 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
391 struct ipa_cst_ref_desc
*rdesc
;
392 if (!ipa_refdesc_pool
)
393 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
394 sizeof (struct ipa_cst_ref_desc
), 32);
396 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
398 rdesc
->next_duplicate
= NULL
;
400 jfunc
->value
.constant
.rdesc
= rdesc
;
403 jfunc
->value
.constant
.rdesc
= NULL
;
406 /* Set JFUNC to be a simple pass-through jump function. */
408 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
411 jfunc
->type
= IPA_JF_PASS_THROUGH
;
412 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
413 jfunc
->value
.pass_through
.formal_id
= formal_id
;
414 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
415 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
418 /* Set JFUNC to be an arithmetic pass through jump function. */
421 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
422 tree operand
, enum tree_code operation
)
424 jfunc
->type
= IPA_JF_PASS_THROUGH
;
425 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
426 jfunc
->value
.pass_through
.formal_id
= formal_id
;
427 jfunc
->value
.pass_through
.operation
= operation
;
428 jfunc
->value
.pass_through
.agg_preserved
= false;
431 /* Set JFUNC to be an ancestor jump function. */
434 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
435 tree type
, int formal_id
, bool agg_preserved
)
437 jfunc
->type
= IPA_JF_ANCESTOR
;
438 jfunc
->value
.ancestor
.formal_id
= formal_id
;
439 jfunc
->value
.ancestor
.offset
= offset
;
440 jfunc
->value
.ancestor
.type
= type
;
441 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
444 /* Extract the acual BINFO being described by JFUNC which must be a known type
448 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
450 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
453 return get_binfo_at_offset (base_binfo
,
454 jfunc
->value
.known_type
.offset
,
455 jfunc
->value
.known_type
.component_type
);
458 /* Structure to be passed in between detect_type_change and
459 check_stmt_for_type_change. */
461 struct type_change_info
463 /* Offset into the object where there is the virtual method pointer we are
465 HOST_WIDE_INT offset
;
466 /* The declaration or SSA_NAME pointer of the base that we are checking for
469 /* If we actually can tell the type that the object has changed to, it is
470 stored in this field. Otherwise it remains NULL_TREE. */
471 tree known_current_type
;
472 /* Set to true if dynamic type change has been detected. */
473 bool type_maybe_changed
;
474 /* Set to true if multiple types have been encountered. known_current_type
475 must be disregarded in that case. */
476 bool multiple_types_encountered
;
479 /* Return true if STMT can modify a virtual method table pointer.
481 This function makes special assumptions about both constructors and
482 destructors which are all the functions that are allowed to alter the VMT
483 pointers. It assumes that destructors begin with assignment into all VMT
484 pointers and that constructors essentially look in the following way:
486 1) The very first thing they do is that they call constructors of ancestor
487 sub-objects that have them.
489 2) Then VMT pointers of this and all its ancestors is set to new values
490 corresponding to the type corresponding to the constructor.
492 3) Only afterwards, other stuff such as constructor of member sub-objects
493 and the code written by the user is run. Only this may include calling
494 virtual functions, directly or indirectly.
496 There is no way to call a constructor of an ancestor sub-object in any
499 This means that we do not have to care whether constructors get the correct
500 type information because they will always change it (in fact, if we define
501 the type to be given by the VMT pointer, it is undefined).
503 The most important fact to derive from the above is that if, for some
504 statement in the section 3, we try to detect whether the dynamic type has
505 changed, we can safely ignore all calls as we examine the function body
506 backwards until we reach statements in section 2 because these calls cannot
507 be ancestor constructors or destructors (if the input is not bogus) and so
508 do not change the dynamic type (this holds true only for automatically
509 allocated objects but at the moment we devirtualize only these). We then
510 must detect that statements in section 2 change the dynamic type and can try
511 to derive the new type. That is enough and we can stop, we will never see
512 the calls into constructors of sub-objects in this code. Therefore we can
513 safely ignore all call statements that we traverse.
517 stmt_may_be_vtbl_ptr_store (gimple stmt
)
519 if (is_gimple_call (stmt
))
521 else if (is_gimple_assign (stmt
))
523 tree lhs
= gimple_assign_lhs (stmt
);
525 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
527 if (flag_strict_aliasing
528 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
531 if (TREE_CODE (lhs
) == COMPONENT_REF
532 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
534 /* In the future we might want to use get_base_ref_and_offset to find
535 if there is a field corresponding to the offset and if so, proceed
536 almost like if it was a component ref. */
542 /* If STMT can be proved to be an assignment to the virtual method table
543 pointer of ANALYZED_OBJ and the type associated with the new table
544 identified, return the type. Otherwise return NULL_TREE. */
547 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
549 HOST_WIDE_INT offset
, size
, max_size
;
552 if (!gimple_assign_single_p (stmt
))
555 lhs
= gimple_assign_lhs (stmt
);
556 rhs
= gimple_assign_rhs1 (stmt
);
557 if (TREE_CODE (lhs
) != COMPONENT_REF
558 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1))
559 || TREE_CODE (rhs
) != ADDR_EXPR
)
561 rhs
= get_base_address (TREE_OPERAND (rhs
, 0));
563 || TREE_CODE (rhs
) != VAR_DECL
564 || !DECL_VIRTUAL_P (rhs
))
567 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
568 if (offset
!= tci
->offset
569 || size
!= POINTER_SIZE
570 || max_size
!= POINTER_SIZE
)
572 if (TREE_CODE (base
) == MEM_REF
)
574 if (TREE_CODE (tci
->object
) != MEM_REF
575 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
576 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
577 TREE_OPERAND (base
, 1)))
580 else if (tci
->object
!= base
)
583 return DECL_CONTEXT (rhs
);
586 /* Callback of walk_aliased_vdefs and a helper function for
587 detect_type_change to check whether a particular statement may modify
588 the virtual table pointer, and if possible also determine the new type of
589 the (sub-)object. It stores its result into DATA, which points to a
590 type_change_info structure. */
593 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
595 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
596 struct type_change_info
*tci
= (struct type_change_info
*) data
;
598 if (stmt_may_be_vtbl_ptr_store (stmt
))
601 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
602 if (tci
->type_maybe_changed
603 && type
!= tci
->known_current_type
)
604 tci
->multiple_types_encountered
= true;
605 tci
->known_current_type
= type
;
606 tci
->type_maybe_changed
= true;
615 /* Like detect_type_change but with extra argument COMP_TYPE which will become
616 the component type part of new JFUNC of dynamic type change is detected and
617 the new base type is identified. */
620 detect_type_change_1 (tree arg
, tree base
, tree comp_type
, gimple call
,
621 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
623 struct type_change_info tci
;
626 gcc_checking_assert (DECL_P (arg
)
627 || TREE_CODE (arg
) == MEM_REF
628 || handled_component_p (arg
));
629 /* Const calls cannot call virtual methods through VMT and so type changes do
631 if (!flag_devirtualize
|| !gimple_vuse (call
))
634 ao_ref_init (&ao
, arg
);
637 ao
.size
= POINTER_SIZE
;
638 ao
.max_size
= ao
.size
;
641 tci
.object
= get_base_address (arg
);
642 tci
.known_current_type
= NULL_TREE
;
643 tci
.type_maybe_changed
= false;
644 tci
.multiple_types_encountered
= false;
646 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
648 if (!tci
.type_maybe_changed
)
651 if (!tci
.known_current_type
652 || tci
.multiple_types_encountered
654 jfunc
->type
= IPA_JF_UNKNOWN
;
656 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
661 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
662 looking for assignments to its virtual table pointer. If it is, return true
663 and fill in the jump function JFUNC with relevant type information or set it
664 to unknown. ARG is the object itself (not a pointer to it, unless
665 dereferenced). BASE is the base of the memory access as returned by
666 get_ref_base_and_extent, as is the offset. */
669 detect_type_change (tree arg
, tree base
, gimple call
,
670 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
672 return detect_type_change_1 (arg
, base
, TREE_TYPE (arg
), call
, jfunc
, offset
);
675 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
676 SSA name (its dereference will become the base and the offset is assumed to
680 detect_type_change_ssa (tree arg
, gimple call
, struct ipa_jump_func
*jfunc
)
684 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
685 if (!flag_devirtualize
686 || !POINTER_TYPE_P (TREE_TYPE (arg
))
687 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != RECORD_TYPE
)
690 comp_type
= TREE_TYPE (TREE_TYPE (arg
));
691 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
692 build_int_cst (ptr_type_node
, 0));
694 return detect_type_change_1 (arg
, arg
, comp_type
, call
, jfunc
, 0);
697 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
698 boolean variable pointed to by DATA. */
701 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
704 bool *b
= (bool *) data
;
709 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
710 a value known not to be modified in this function before reaching the
711 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
712 information about the parameter. */
715 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
716 gimple stmt
, tree parm_load
)
718 bool modified
= false;
719 bitmap
*visited_stmts
;
722 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
725 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
726 ao_ref_init (&refd
, parm_load
);
727 /* We can cache visited statements only when parm_ainfo is available and when
728 we are looking at a naked load of the whole parameter. */
729 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
730 visited_stmts
= NULL
;
732 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
733 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
735 if (parm_ainfo
&& modified
)
736 parm_ainfo
->parm_modified
= true;
740 /* If STMT is an assignment that loads a value from an parameter declaration,
741 return the index of the parameter in ipa_node_params which has not been
742 modified. Otherwise return -1. */
745 load_from_unmodified_param (vec
<ipa_param_descriptor_t
> descriptors
,
746 struct param_analysis_info
*parms_ainfo
,
752 if (!gimple_assign_single_p (stmt
))
755 op1
= gimple_assign_rhs1 (stmt
);
756 if (TREE_CODE (op1
) != PARM_DECL
)
759 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
761 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
768 /* Return true if memory reference REF loads data that are known to be
769 unmodified in this function before reaching statement STMT. PARM_AINFO, if
770 non-NULL, is a pointer to a structure containing temporary information about
774 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
775 gimple stmt
, tree ref
)
777 bool modified
= false;
780 gcc_checking_assert (gimple_vuse (stmt
));
781 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
784 ao_ref_init (&refd
, ref
);
785 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
787 if (parm_ainfo
&& modified
)
788 parm_ainfo
->ref_modified
= true;
792 /* Return true if the data pointed to by PARM is known to be unmodified in this
793 function before reaching call statement CALL into which it is passed.
794 PARM_AINFO is a pointer to a structure containing temporary information
798 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
799 gimple call
, tree parm
)
801 bool modified
= false;
804 /* It's unnecessary to calculate anything about memory contnets for a const
805 function because it is not goin to use it. But do not cache the result
806 either. Also, no such calculations for non-pointers. */
807 if (!gimple_vuse (call
)
808 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
811 if (parm_ainfo
->pt_modified
)
814 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
815 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
816 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
818 parm_ainfo
->pt_modified
= true;
822 /* Return true if we can prove that OP is a memory reference loading unmodified
823 data from an aggregate passed as a parameter and if the aggregate is passed
824 by reference, that the alias type of the load corresponds to the type of the
825 formal parameter (so that we can rely on this type for TBAA in callers).
826 INFO and PARMS_AINFO describe parameters of the current function (but the
827 latter can be NULL), STMT is the load statement. If function returns true,
828 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
829 within the aggregate and whether it is a load from a value passed by
830 reference respectively. */
833 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor_t
> descriptors
,
834 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
835 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
839 HOST_WIDE_INT size
, max_size
;
840 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
842 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
847 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
849 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
859 if (TREE_CODE (base
) != MEM_REF
860 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
861 || !integer_zerop (TREE_OPERAND (base
, 1)))
864 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
866 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
867 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
871 /* This branch catches situations where a pointer parameter is not a
872 gimple register, for example:
874 void hip7(S*) (struct S * p)
876 void (*<T2e4>) (struct S *) D.1867;
886 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
887 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
891 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
901 /* Just like the previous function, just without the param_analysis_info
902 pointer, for users outside of this file. */
905 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
906 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
909 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
913 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
914 of an assignment statement STMT, try to determine whether we are actually
915 handling any of the following cases and construct an appropriate jump
916 function into JFUNC if so:
918 1) The passed value is loaded from a formal parameter which is not a gimple
919 register (most probably because it is addressable, the value has to be
920 scalar) and we can guarantee the value has not changed. This case can
921 therefore be described by a simple pass-through jump function. For example:
930 2) The passed value can be described by a simple arithmetic pass-through
937 D.2064_4 = a.1(D) + 4;
940 This case can also occur in combination of the previous one, e.g.:
948 D.2064_4 = a.0_3 + 4;
951 3) The passed value is an address of an object within another one (which
952 also passed by reference). Such situations are described by an ancestor
953 jump function and describe situations such as:
955 B::foo() (struct B * const this)
959 D.1845_2 = &this_1(D)->D.1748;
962 INFO is the structure describing individual parameters access different
963 stages of IPA optimizations. PARMS_AINFO contains the information that is
964 only needed for intraprocedural analysis. */
967 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
968 struct param_analysis_info
*parms_ainfo
,
969 struct ipa_jump_func
*jfunc
,
970 gimple call
, gimple stmt
, tree name
)
972 HOST_WIDE_INT offset
, size
, max_size
;
973 tree op1
, tc_ssa
, base
, ssa
;
976 op1
= gimple_assign_rhs1 (stmt
);
978 if (TREE_CODE (op1
) == SSA_NAME
)
980 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
981 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
983 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
984 SSA_NAME_DEF_STMT (op1
));
989 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
990 tc_ssa
= gimple_assign_lhs (stmt
);
995 tree op2
= gimple_assign_rhs2 (stmt
);
999 if (!is_gimple_ip_invariant (op2
)
1000 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1001 && !useless_type_conversion_p (TREE_TYPE (name
),
1005 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1006 gimple_assign_rhs_code (stmt
));
1008 else if (gimple_assign_single_p (stmt
)
1009 && !detect_type_change_ssa (tc_ssa
, call
, jfunc
))
1011 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1013 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1018 if (TREE_CODE (op1
) != ADDR_EXPR
)
1020 op1
= TREE_OPERAND (op1
, 0);
1021 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1023 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1024 if (TREE_CODE (base
) != MEM_REF
1025 /* If this is a varying address, punt. */
1027 || max_size
!= size
)
1029 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1030 ssa
= TREE_OPERAND (base
, 0);
1031 if (TREE_CODE (ssa
) != SSA_NAME
1032 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1036 /* Dynamic types are changed only in constructors and destructors and */
1037 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1039 && !detect_type_change (op1
, base
, call
, jfunc
, offset
))
1040 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1041 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1045 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1048 iftmp.1_3 = &obj_2(D)->D.1762;
1050 The base of the MEM_REF must be a default definition SSA NAME of a
1051 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1052 whole MEM_REF expression is returned and the offset calculated from any
1053 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1054 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1057 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1059 HOST_WIDE_INT size
, max_size
;
1060 tree expr
, parm
, obj
;
1062 if (!gimple_assign_single_p (assign
))
1064 expr
= gimple_assign_rhs1 (assign
);
1066 if (TREE_CODE (expr
) != ADDR_EXPR
)
1068 expr
= TREE_OPERAND (expr
, 0);
1070 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1072 if (TREE_CODE (expr
) != MEM_REF
1073 /* If this is a varying address, punt. */
1078 parm
= TREE_OPERAND (expr
, 0);
1079 if (TREE_CODE (parm
) != SSA_NAME
1080 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1081 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1084 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1090 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1091 statement PHI, try to find out whether NAME is in fact a
1092 multiple-inheritance typecast from a descendant into an ancestor of a formal
1093 parameter and thus can be described by an ancestor jump function and if so,
1094 write the appropriate function into JFUNC.
1096 Essentially we want to match the following pattern:
1104 iftmp.1_3 = &obj_2(D)->D.1762;
1107 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1108 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1112 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1113 struct param_analysis_info
*parms_ainfo
,
1114 struct ipa_jump_func
*jfunc
,
1115 gimple call
, gimple phi
)
1117 HOST_WIDE_INT offset
;
1118 gimple assign
, cond
;
1119 basic_block phi_bb
, assign_bb
, cond_bb
;
1120 tree tmp
, parm
, expr
, obj
;
1123 if (gimple_phi_num_args (phi
) != 2)
1126 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1127 tmp
= PHI_ARG_DEF (phi
, 0);
1128 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1129 tmp
= PHI_ARG_DEF (phi
, 1);
1132 if (TREE_CODE (tmp
) != SSA_NAME
1133 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1134 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1135 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1138 assign
= SSA_NAME_DEF_STMT (tmp
);
1139 assign_bb
= gimple_bb (assign
);
1140 if (!single_pred_p (assign_bb
))
1142 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1145 parm
= TREE_OPERAND (expr
, 0);
1146 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1147 gcc_assert (index
>= 0);
1149 cond_bb
= single_pred (assign_bb
);
1150 cond
= last_stmt (cond_bb
);
1152 || gimple_code (cond
) != GIMPLE_COND
1153 || gimple_cond_code (cond
) != NE_EXPR
1154 || gimple_cond_lhs (cond
) != parm
1155 || !integer_zerop (gimple_cond_rhs (cond
)))
1158 phi_bb
= gimple_bb (phi
);
1159 for (i
= 0; i
< 2; i
++)
1161 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1162 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1166 if (!detect_type_change (obj
, expr
, call
, jfunc
, offset
))
1167 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1168 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1172 /* Given OP which is passed as an actual argument to a called function,
1173 determine if it is possible to construct a KNOWN_TYPE jump function for it
1174 and if so, create one and store it to JFUNC. */
1177 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1180 HOST_WIDE_INT offset
, size
, max_size
;
1183 if (!flag_devirtualize
1184 || TREE_CODE (op
) != ADDR_EXPR
1185 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
)
1188 op
= TREE_OPERAND (op
, 0);
1189 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1193 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1194 || is_global_var (base
))
1197 if (!TYPE_BINFO (TREE_TYPE (base
))
1198 || detect_type_change (op
, base
, call
, jfunc
, offset
))
1201 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
), TREE_TYPE (op
));
1204 /* Inspect the given TYPE and return true iff it has the same structure (the
1205 same number of fields of the same types) as a C++ member pointer. If
1206 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1207 corresponding fields there. */
1210 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1214 if (TREE_CODE (type
) != RECORD_TYPE
)
1217 fld
= TYPE_FIELDS (type
);
1218 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1219 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1220 || !host_integerp (DECL_FIELD_OFFSET (fld
), 1))
1226 fld
= DECL_CHAIN (fld
);
1227 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1228 || !host_integerp (DECL_FIELD_OFFSET (fld
), 1))
1233 if (DECL_CHAIN (fld
))
1239 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1240 return the rhs of its defining statement. Otherwise return RHS as it
1244 get_ssa_def_if_simple_copy (tree rhs
)
1246 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1248 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1250 if (gimple_assign_single_p (def_stmt
))
1251 rhs
= gimple_assign_rhs1 (def_stmt
);
1258 /* Simple linked list, describing known contents of an aggregate beforere
1261 struct ipa_known_agg_contents_list
1263 /* Offset and size of the described part of the aggregate. */
1264 HOST_WIDE_INT offset
, size
;
1265 /* Known constant value or NULL if the contents is known to be unknown. */
1267 /* Pointer to the next structure in the list. */
1268 struct ipa_known_agg_contents_list
*next
;
1271 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1272 in ARG is filled in with constant values. ARG can either be an aggregate
1273 expression or a pointer to an aggregate. JFUNC is the jump function into
1274 which the constants are subsequently stored. */
1277 determine_known_aggregate_parts (gimple call
, tree arg
,
1278 struct ipa_jump_func
*jfunc
)
1280 struct ipa_known_agg_contents_list
*list
= NULL
;
1281 int item_count
= 0, const_count
= 0;
1282 HOST_WIDE_INT arg_offset
, arg_size
;
1283 gimple_stmt_iterator gsi
;
1285 bool check_ref
, by_ref
;
1288 /* The function operates in three stages. First, we prepare check_ref, r,
1289 arg_base and arg_offset based on what is actually passed as an actual
1292 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1295 if (TREE_CODE (arg
) == SSA_NAME
)
1298 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
))), 1))
1303 type_size
= TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)));
1304 arg_size
= tree_low_cst (type_size
, 1);
1305 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1307 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1309 HOST_WIDE_INT arg_max_size
;
1311 arg
= TREE_OPERAND (arg
, 0);
1312 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1314 if (arg_max_size
== -1
1315 || arg_max_size
!= arg_size
1318 if (DECL_P (arg_base
))
1322 size
= build_int_cst (integer_type_node
, arg_size
);
1323 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
1333 HOST_WIDE_INT arg_max_size
;
1335 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1339 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1341 if (arg_max_size
== -1
1342 || arg_max_size
!= arg_size
1346 ao_ref_init (&r
, arg
);
1349 /* Second stage walks back the BB, looks at individual statements and as long
1350 as it is confident of how the statements affect contents of the
1351 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1353 gsi
= gsi_for_stmt (call
);
1355 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1357 struct ipa_known_agg_contents_list
*n
, **p
;
1358 gimple stmt
= gsi_stmt (gsi
);
1359 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1360 tree lhs
, rhs
, lhs_base
;
1361 bool partial_overlap
;
1363 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1365 if (!gimple_assign_single_p (stmt
))
1368 lhs
= gimple_assign_lhs (stmt
);
1369 rhs
= gimple_assign_rhs1 (stmt
);
1370 if (!is_gimple_reg_type (rhs
)
1371 || TREE_CODE (lhs
) == BIT_FIELD_REF
1372 || contains_bitfld_component_ref_p (lhs
))
1375 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
1377 if (lhs_max_size
== -1
1378 || lhs_max_size
!= lhs_size
1379 || (lhs_offset
< arg_offset
1380 && lhs_offset
+ lhs_size
> arg_offset
)
1381 || (lhs_offset
< arg_offset
+ arg_size
1382 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
1387 if (TREE_CODE (lhs_base
) != MEM_REF
1388 || TREE_OPERAND (lhs_base
, 0) != arg_base
1389 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1392 else if (lhs_base
!= arg_base
)
1394 if (DECL_P (lhs_base
))
1400 if (lhs_offset
+ lhs_size
< arg_offset
1401 || lhs_offset
>= (arg_offset
+ arg_size
))
1404 partial_overlap
= false;
1406 while (*p
&& (*p
)->offset
< lhs_offset
)
1408 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1410 partial_overlap
= true;
1415 if (partial_overlap
)
1417 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1419 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1420 /* We already know this value is subsequently overwritten with
1424 /* Otherwise this is a partial overlap which we cannot
1429 rhs
= get_ssa_def_if_simple_copy (rhs
);
1430 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1432 n
->offset
= lhs_offset
;
1433 if (is_gimple_ip_invariant (rhs
))
1439 n
->constant
= NULL_TREE
;
1444 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1445 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1449 /* Third stage just goes over the list and creates an appropriate vector of
1450 ipa_agg_jf_item structures out of it, of sourse only if there are
1451 any known constants to begin with. */
1455 jfunc
->agg
.by_ref
= by_ref
;
1456 vec_alloc (jfunc
->agg
.items
, const_count
);
1461 struct ipa_agg_jf_item item
;
1462 item
.offset
= list
->offset
- arg_offset
;
1463 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1464 item
.value
= unshare_expr_without_location (list
->constant
);
1465 jfunc
->agg
.items
->quick_push (item
);
1472 /* Compute jump function for all arguments of callsite CS and insert the
1473 information in the jump_functions array in the ipa_edge_args corresponding
1474 to this callsite. */
1477 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1478 struct cgraph_edge
*cs
)
1480 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1481 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1482 gimple call
= cs
->call_stmt
;
1483 int n
, arg_num
= gimple_call_num_args (call
);
1485 if (arg_num
== 0 || args
->jump_functions
)
1487 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1489 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1492 for (n
= 0; n
< arg_num
; n
++)
1494 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1495 tree arg
= gimple_call_arg (call
, n
);
1497 if (is_gimple_ip_invariant (arg
))
1498 ipa_set_jf_constant (jfunc
, arg
, cs
);
1499 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1500 && TREE_CODE (arg
) == PARM_DECL
)
1502 int index
= ipa_get_param_decl_index (info
, arg
);
1504 gcc_assert (index
>=0);
1505 /* Aggregate passed by value, check for pass-through, otherwise we
1506 will attempt to fill in aggregate contents later in this
1508 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1510 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
1514 else if (TREE_CODE (arg
) == SSA_NAME
)
1516 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1518 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1520 && !detect_type_change_ssa (arg
, call
, jfunc
))
1523 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1525 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1530 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1531 if (is_gimple_assign (stmt
))
1532 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1534 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1535 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1540 compute_known_type_jump_func (arg
, jfunc
, call
);
1542 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1543 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1544 && (jfunc
->type
!= IPA_JF_ANCESTOR
1545 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1546 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1547 || (POINTER_TYPE_P (TREE_TYPE (arg
)))))
1548 determine_known_aggregate_parts (call
, arg
, jfunc
);
1552 /* Compute jump functions for all edges - both direct and indirect - outgoing
1553 from NODE. Also count the actual arguments in the process. */
1556 ipa_compute_jump_functions (struct cgraph_node
*node
,
1557 struct param_analysis_info
*parms_ainfo
)
1559 struct cgraph_edge
*cs
;
1561 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1563 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1565 /* We do not need to bother analyzing calls to unknown
1566 functions unless they may become known during lto/whopr. */
1567 if (!callee
->symbol
.definition
&& !flag_lto
)
1569 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1572 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1573 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1576 /* If STMT looks like a statement loading a value from a member pointer formal
1577 parameter, return that parameter and store the offset of the field to
1578 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1579 might be clobbered). If USE_DELTA, then we look for a use of the delta
1580 field rather than the pfn. */
1583 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1584 HOST_WIDE_INT
*offset_p
)
1586 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1588 if (!gimple_assign_single_p (stmt
))
1591 rhs
= gimple_assign_rhs1 (stmt
);
1592 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1594 ref_field
= TREE_OPERAND (rhs
, 1);
1595 rhs
= TREE_OPERAND (rhs
, 0);
1598 ref_field
= NULL_TREE
;
1599 if (TREE_CODE (rhs
) != MEM_REF
)
1601 rec
= TREE_OPERAND (rhs
, 0);
1602 if (TREE_CODE (rec
) != ADDR_EXPR
)
1604 rec
= TREE_OPERAND (rec
, 0);
1605 if (TREE_CODE (rec
) != PARM_DECL
1606 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1608 ref_offset
= TREE_OPERAND (rhs
, 1);
1615 *offset_p
= int_bit_position (fld
);
1619 if (integer_nonzerop (ref_offset
))
1621 return ref_field
== fld
? rec
: NULL_TREE
;
1624 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1628 /* Returns true iff T is an SSA_NAME defined by a statement. */
1631 ipa_is_ssa_with_stmt_def (tree t
)
1633 if (TREE_CODE (t
) == SSA_NAME
1634 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1640 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1641 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1642 indirect call graph edge. */
1644 static struct cgraph_edge
*
1645 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1647 struct cgraph_edge
*cs
;
1649 cs
= cgraph_edge (node
, stmt
);
1650 cs
->indirect_info
->param_index
= param_index
;
1651 cs
->indirect_info
->offset
= 0;
1652 cs
->indirect_info
->polymorphic
= 0;
1653 cs
->indirect_info
->agg_contents
= 0;
1654 cs
->indirect_info
->member_ptr
= 0;
1658 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1659 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1660 intermediate information about each formal parameter. Currently it checks
1661 whether the call calls a pointer that is a formal parameter and if so, the
1662 parameter is marked with the called flag and an indirect call graph edge
1663 describing the call is created. This is very simple for ordinary pointers
1664 represented in SSA but not-so-nice when it comes to member pointers. The
1665 ugly part of this function does nothing more than trying to match the
1666 pattern of such a call. An example of such a pattern is the gimple dump
1667 below, the call is on the last line:
1670 f$__delta_5 = f.__delta;
1671 f$__pfn_24 = f.__pfn;
1675 f$__delta_5 = MEM[(struct *)&f];
1676 f$__pfn_24 = MEM[(struct *)&f + 4B];
1678 and a few lines below:
1681 D.2496_3 = (int) f$__pfn_24;
1682 D.2497_4 = D.2496_3 & 1;
1689 D.2500_7 = (unsigned int) f$__delta_5;
1690 D.2501_8 = &S + D.2500_7;
1691 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1692 D.2503_10 = *D.2502_9;
1693 D.2504_12 = f$__pfn_24 + -1;
1694 D.2505_13 = (unsigned int) D.2504_12;
1695 D.2506_14 = D.2503_10 + D.2505_13;
1696 D.2507_15 = *D.2506_14;
1697 iftmp.11_16 = (String:: *) D.2507_15;
1700 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1701 D.2500_19 = (unsigned int) f$__delta_5;
1702 D.2508_20 = &S + D.2500_19;
1703 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1705 Such patterns are results of simple calls to a member pointer:
1707 int doprinting (int (MyString::* f)(int) const)
1709 MyString S ("somestring");
1714 Moreover, the function also looks for called pointers loaded from aggregates
1715 passed by value or reference. */
1718 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1719 struct ipa_node_params
*info
,
1720 struct param_analysis_info
*parms_ainfo
,
1721 gimple call
, tree target
)
1726 tree rec
, rec2
, cond
;
1729 basic_block bb
, virt_bb
, join
;
1730 HOST_WIDE_INT offset
;
1733 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1735 tree var
= SSA_NAME_VAR (target
);
1736 index
= ipa_get_param_decl_index (info
, var
);
1738 ipa_note_param_call (node
, index
, call
);
1742 def
= SSA_NAME_DEF_STMT (target
);
1743 if (gimple_assign_single_p (def
)
1744 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1745 gimple_assign_rhs1 (def
), &index
, &offset
,
1748 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1749 cs
->indirect_info
->offset
= offset
;
1750 cs
->indirect_info
->agg_contents
= 1;
1751 cs
->indirect_info
->by_ref
= by_ref
;
1755 /* Now we need to try to match the complex pattern of calling a member
1757 if (gimple_code (def
) != GIMPLE_PHI
1758 || gimple_phi_num_args (def
) != 2
1759 || !POINTER_TYPE_P (TREE_TYPE (target
))
1760 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1763 /* First, we need to check whether one of these is a load from a member
1764 pointer that is a parameter to this function. */
1765 n1
= PHI_ARG_DEF (def
, 0);
1766 n2
= PHI_ARG_DEF (def
, 1);
1767 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1769 d1
= SSA_NAME_DEF_STMT (n1
);
1770 d2
= SSA_NAME_DEF_STMT (n2
);
1772 join
= gimple_bb (def
);
1773 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1775 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1778 bb
= EDGE_PRED (join
, 0)->src
;
1779 virt_bb
= gimple_bb (d2
);
1781 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1783 bb
= EDGE_PRED (join
, 1)->src
;
1784 virt_bb
= gimple_bb (d1
);
1789 /* Second, we need to check that the basic blocks are laid out in the way
1790 corresponding to the pattern. */
1792 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1793 || single_pred (virt_bb
) != bb
1794 || single_succ (virt_bb
) != join
)
1797 /* Third, let's see that the branching is done depending on the least
1798 significant bit of the pfn. */
1800 branch
= last_stmt (bb
);
1801 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1804 if ((gimple_cond_code (branch
) != NE_EXPR
1805 && gimple_cond_code (branch
) != EQ_EXPR
)
1806 || !integer_zerop (gimple_cond_rhs (branch
)))
1809 cond
= gimple_cond_lhs (branch
);
1810 if (!ipa_is_ssa_with_stmt_def (cond
))
1813 def
= SSA_NAME_DEF_STMT (cond
);
1814 if (!is_gimple_assign (def
)
1815 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1816 || !integer_onep (gimple_assign_rhs2 (def
)))
1819 cond
= gimple_assign_rhs1 (def
);
1820 if (!ipa_is_ssa_with_stmt_def (cond
))
1823 def
= SSA_NAME_DEF_STMT (cond
);
1825 if (is_gimple_assign (def
)
1826 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1828 cond
= gimple_assign_rhs1 (def
);
1829 if (!ipa_is_ssa_with_stmt_def (cond
))
1831 def
= SSA_NAME_DEF_STMT (cond
);
1834 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1835 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1836 == ptrmemfunc_vbit_in_delta
),
1841 index
= ipa_get_param_decl_index (info
, rec
);
1843 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1845 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1846 cs
->indirect_info
->offset
= offset
;
1847 cs
->indirect_info
->agg_contents
= 1;
1848 cs
->indirect_info
->member_ptr
= 1;
1854 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1855 object referenced in the expression is a formal parameter of the caller
1856 (described by INFO), create a call note for the statement. */
1859 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1860 struct ipa_node_params
*info
, gimple call
,
1863 struct cgraph_edge
*cs
;
1864 struct cgraph_indirect_call_info
*ii
;
1865 struct ipa_jump_func jfunc
;
1866 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1868 HOST_WIDE_INT anc_offset
;
1870 if (!flag_devirtualize
)
1873 if (TREE_CODE (obj
) != SSA_NAME
)
1876 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1878 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1882 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1883 gcc_assert (index
>= 0);
1884 if (detect_type_change_ssa (obj
, call
, &jfunc
))
1889 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1892 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
1895 index
= ipa_get_param_decl_index (info
,
1896 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
1897 gcc_assert (index
>= 0);
1898 if (detect_type_change (obj
, expr
, call
, &jfunc
, anc_offset
))
1902 cs
= ipa_note_param_call (node
, index
, call
);
1903 ii
= cs
->indirect_info
;
1904 ii
->offset
= anc_offset
;
1905 ii
->otr_token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (target
), 1);
1906 ii
->otr_type
= obj_type_ref_class (target
);
1907 ii
->polymorphic
= 1;
1910 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1911 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1912 containing intermediate information about each formal parameter. */
1915 ipa_analyze_call_uses (struct cgraph_node
*node
,
1916 struct ipa_node_params
*info
,
1917 struct param_analysis_info
*parms_ainfo
, gimple call
)
1919 tree target
= gimple_call_fn (call
);
1923 if (TREE_CODE (target
) == SSA_NAME
)
1924 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
1925 else if (virtual_method_call_p (target
))
1926 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
1930 /* Analyze the call statement STMT with respect to formal parameters (described
1931 in INFO) of caller given by NODE. Currently it only checks whether formal
1932 parameters are called. PARMS_AINFO is a pointer to a vector containing
1933 intermediate information about each formal parameter. */
1936 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
1937 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
1939 if (is_gimple_call (stmt
))
1940 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
1943 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1944 If OP is a parameter declaration, mark it as used in the info structure
1948 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED
,
1949 tree op
, void *data
)
1951 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
1953 op
= get_base_address (op
);
1955 && TREE_CODE (op
) == PARM_DECL
)
1957 int index
= ipa_get_param_decl_index (info
, op
);
1958 gcc_assert (index
>= 0);
1959 ipa_set_param_used (info
, index
, true);
1965 /* Scan the function body of NODE and inspect the uses of formal parameters.
1966 Store the findings in various structures of the associated ipa_node_params
1967 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1968 vector containing intermediate information about each formal parameter. */
1971 ipa_analyze_params_uses (struct cgraph_node
*node
,
1972 struct param_analysis_info
*parms_ainfo
)
1974 tree decl
= node
->symbol
.decl
;
1976 struct function
*func
;
1977 gimple_stmt_iterator gsi
;
1978 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
1981 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
1984 info
->uses_analysis_done
= 1;
1985 if (ipa_func_spec_opts_forbid_analysis_p (node
))
1987 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
1989 ipa_set_param_used (info
, i
, true);
1990 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
1995 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
1997 tree parm
= ipa_get_param (info
, i
);
1998 int controlled_uses
= 0;
2000 /* For SSA regs see if parameter is used. For non-SSA we compute
2001 the flag during modification analysis. */
2002 if (is_gimple_reg (parm
))
2004 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->symbol
.decl
),
2006 if (ddef
&& !has_zero_uses (ddef
))
2008 imm_use_iterator imm_iter
;
2009 use_operand_p use_p
;
2011 ipa_set_param_used (info
, i
, true);
2012 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2013 if (!is_gimple_call (USE_STMT (use_p
)))
2015 controlled_uses
= IPA_UNDESCRIBED_USE
;
2022 controlled_uses
= 0;
2025 controlled_uses
= IPA_UNDESCRIBED_USE
;
2026 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2029 func
= DECL_STRUCT_FUNCTION (decl
);
2030 FOR_EACH_BB_FN (bb
, func
)
2032 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2034 gimple stmt
= gsi_stmt (gsi
);
2036 if (is_gimple_debug (stmt
))
2039 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2040 walk_stmt_load_store_addr_ops (stmt
, info
,
2041 visit_ref_for_mod_analysis
,
2042 visit_ref_for_mod_analysis
,
2043 visit_ref_for_mod_analysis
);
2045 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2046 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2047 visit_ref_for_mod_analysis
,
2048 visit_ref_for_mod_analysis
,
2049 visit_ref_for_mod_analysis
);
2053 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2056 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
2060 for (i
= 0; i
< param_count
; i
++)
2062 if (parms_ainfo
[i
].parm_visited_statements
)
2063 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2064 if (parms_ainfo
[i
].pt_visited_statements
)
2065 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2069 /* Initialize the array describing properties of of formal parameters
2070 of NODE, analyze their uses and compute jump functions associated
2071 with actual arguments of calls from within NODE. */
2074 ipa_analyze_node (struct cgraph_node
*node
)
2076 struct ipa_node_params
*info
;
2077 struct param_analysis_info
*parms_ainfo
;
2080 ipa_check_create_node_params ();
2081 ipa_check_create_edge_args ();
2082 info
= IPA_NODE_REF (node
);
2083 push_cfun (DECL_STRUCT_FUNCTION (node
->symbol
.decl
));
2084 ipa_initialize_node_params (node
);
2086 param_count
= ipa_get_param_count (info
);
2087 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2088 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2090 ipa_analyze_params_uses (node
, parms_ainfo
);
2091 ipa_compute_jump_functions (node
, parms_ainfo
);
2093 free_parms_ainfo (parms_ainfo
, param_count
);
2097 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2098 attempt a type-based devirtualization. If successful, return the
2099 target function declaration, otherwise return NULL. */
2102 ipa_intraprocedural_devirtualization (gimple call
)
2104 tree binfo
, token
, fndecl
;
2105 struct ipa_jump_func jfunc
;
2106 tree otr
= gimple_call_fn (call
);
2108 jfunc
.type
= IPA_JF_UNKNOWN
;
2109 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2111 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2113 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
2116 token
= OBJ_TYPE_REF_TOKEN (otr
);
2117 fndecl
= gimple_get_virt_method_for_binfo (tree_low_cst (token
, 1),
2122 /* Update the jump function DST when the call graph edge corresponding to SRC is
2123 is being inlined, knowing that DST is of type ancestor and src of known
2127 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2128 struct ipa_jump_func
*dst
)
2130 HOST_WIDE_INT combined_offset
;
2133 combined_offset
= ipa_get_jf_known_type_offset (src
)
2134 + ipa_get_jf_ancestor_offset (dst
);
2135 combined_type
= ipa_get_jf_ancestor_type (dst
);
2137 ipa_set_jf_known_type (dst
, combined_offset
,
2138 ipa_get_jf_known_type_base_type (src
),
2142 /* Update the jump functions associated with call graph edge E when the call
2143 graph edge CS is being inlined, assuming that E->caller is already (possibly
2144 indirectly) inlined into CS->callee and that E has not been inlined. */
2147 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2148 struct cgraph_edge
*e
)
2150 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2151 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2152 int count
= ipa_get_cs_argument_count (args
);
2155 for (i
= 0; i
< count
; i
++)
2157 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2159 if (dst
->type
== IPA_JF_ANCESTOR
)
2161 struct ipa_jump_func
*src
;
2162 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2164 /* Variable number of arguments can cause havoc if we try to access
2165 one that does not exist in the inlined edge. So make sure we
2167 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2169 dst
->type
= IPA_JF_UNKNOWN
;
2173 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2176 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2178 struct ipa_agg_jf_item
*item
;
2181 /* Currently we do not produce clobber aggregate jump functions,
2182 replace with merging when we do. */
2183 gcc_assert (!dst
->agg
.items
);
2185 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2186 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2187 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2188 item
->offset
-= dst
->value
.ancestor
.offset
;
2191 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2192 combine_known_type_and_ancestor_jfs (src
, dst
);
2193 else if (src
->type
== IPA_JF_PASS_THROUGH
2194 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2196 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2197 dst
->value
.ancestor
.agg_preserved
&=
2198 src
->value
.pass_through
.agg_preserved
;
2200 else if (src
->type
== IPA_JF_ANCESTOR
)
2202 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2203 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2204 dst
->value
.ancestor
.agg_preserved
&=
2205 src
->value
.ancestor
.agg_preserved
;
2208 dst
->type
= IPA_JF_UNKNOWN
;
2210 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2212 struct ipa_jump_func
*src
;
2213 /* We must check range due to calls with variable number of arguments
2214 and we cannot combine jump functions with operations. */
2215 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2216 && (dst
->value
.pass_through
.formal_id
2217 < ipa_get_cs_argument_count (top
)))
2220 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2221 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2222 agg_p
= dst
->value
.pass_through
.agg_preserved
;
2224 dst
->type
= src
->type
;
2225 dst
->value
= src
->value
;
2228 && (agg_p
|| !src
->agg
.by_ref
))
2230 /* Currently we do not produce clobber aggregate jump
2231 functions, replace with merging when we do. */
2232 gcc_assert (!dst
->agg
.items
);
2234 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2235 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2240 if (dst
->type
== IPA_JF_PASS_THROUGH
)
2241 dst
->value
.pass_through
.agg_preserved
= false;
2242 else if (dst
->type
== IPA_JF_ANCESTOR
)
2243 dst
->value
.ancestor
.agg_preserved
= false;
2247 dst
->type
= IPA_JF_UNKNOWN
;
2252 /* If TARGET is an addr_expr of a function declaration, make it the destination
2253 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2255 struct cgraph_edge
*
2256 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2258 struct cgraph_node
*callee
;
2259 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2260 bool unreachable
= false;
2262 if (TREE_CODE (target
) == ADDR_EXPR
)
2263 target
= TREE_OPERAND (target
, 0);
2264 if (TREE_CODE (target
) != FUNCTION_DECL
)
2266 target
= canonicalize_constructor_val (target
, NULL
);
2267 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2269 if (ie
->indirect_info
->member_ptr
)
2270 /* Member pointer call that goes through a VMT lookup. */
2274 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2275 " in %s/%i, making it unreachable.\n",
2276 cgraph_node_name (ie
->caller
), ie
->caller
->symbol
.order
);
2277 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2278 callee
= cgraph_get_create_node (target
);
2282 callee
= cgraph_get_node (target
);
2285 callee
= cgraph_get_node (target
);
2287 /* Because may-edges are not explicitely represented and vtable may be external,
2288 we may create the first reference to the object in the unit. */
2289 if (!callee
|| callee
->global
.inlined_to
)
2292 /* We are better to ensure we can refer to it.
2293 In the case of static functions we are out of luck, since we already
2294 removed its body. In the case of public functions we may or may
2295 not introduce the reference. */
2296 if (!canonicalize_constructor_val (target
, NULL
)
2297 || !TREE_PUBLIC (target
))
2300 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2301 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2302 xstrdup (cgraph_node_name (ie
->caller
)),
2303 ie
->caller
->symbol
.order
,
2304 xstrdup (cgraph_node_name (ie
->callee
)),
2305 ie
->callee
->symbol
.order
);
2308 callee
= cgraph_get_create_real_symbol_node (target
);
2310 ipa_check_create_node_params ();
2312 /* We can not make edges to inline clones. It is bug that someone removed
2313 the cgraph node too early. */
2314 gcc_assert (!callee
->global
.inlined_to
);
2316 if (dump_file
&& !unreachable
)
2318 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2319 "(%s/%i -> %s/%i), for stmt ",
2320 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2321 xstrdup (cgraph_node_name (ie
->caller
)),
2322 ie
->caller
->symbol
.order
,
2323 xstrdup (cgraph_node_name (callee
)),
2324 callee
->symbol
.order
);
2326 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2328 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2330 ie
= cgraph_make_edge_direct (ie
, callee
);
2331 es
= inline_edge_summary (ie
);
2332 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2333 - eni_size_weights
.call_cost
);
2334 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2335 - eni_time_weights
.call_cost
);
2340 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2341 return NULL if there is not any. BY_REF specifies whether the value has to
2342 be passed by reference or by value. */
2345 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2346 HOST_WIDE_INT offset
, bool by_ref
)
2348 struct ipa_agg_jf_item
*item
;
2351 if (by_ref
!= agg
->by_ref
)
2354 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2355 if (item
->offset
== offset
)
2357 /* Currently we do not have clobber values, return NULL for them once
2359 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2365 /* Remove a reference to SYMBOL from the list of references of a node given by
2366 reference description RDESC. */
2369 remove_described_reference (symtab_node symbol
, struct ipa_cst_ref_desc
*rdesc
)
2371 struct ipa_ref
*to_del
;
2372 struct cgraph_edge
*origin
;
2375 to_del
= ipa_find_reference ((symtab_node
) origin
->caller
, symbol
,
2376 origin
->call_stmt
, origin
->lto_stmt_uid
);
2377 gcc_assert (to_del
);
2378 ipa_remove_reference (to_del
);
2380 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2381 xstrdup (cgraph_node_name (origin
->caller
)),
2382 origin
->caller
->symbol
.order
, xstrdup (symtab_node_name (symbol
)));
2385 /* If JFUNC has a reference description with refcount different from
2386 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2387 NULL. JFUNC must be a constant jump function. */
2389 static struct ipa_cst_ref_desc
*
2390 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2392 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2393 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2399 /* Try to find a destination for indirect edge IE that corresponds to a simple
2400 call or a call of a member function pointer and where the destination is a
2401 pointer formal parameter described by jump function JFUNC. If it can be
2402 determined, return the newly direct edge, otherwise return NULL.
2403 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2405 static struct cgraph_edge
*
2406 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2407 struct ipa_jump_func
*jfunc
,
2408 struct ipa_node_params
*new_root_info
)
2410 struct cgraph_edge
*cs
;
2412 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2413 bool speculative
= ie
->speculative
;
2414 struct ipa_cst_ref_desc
*rdesc
;
2416 if (ie
->indirect_info
->agg_contents
)
2417 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2418 ie
->indirect_info
->offset
,
2419 ie
->indirect_info
->by_ref
);
2421 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2424 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2426 /* FIXME: speculative edges can be handled. */
2427 if (cs
&& !agg_contents
&& !speculative
2428 && jfunc
->type
== IPA_JF_CONST
2429 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2430 && --rdesc
->refcount
== 0)
2431 remove_described_reference ((symtab_node
) cs
->callee
, rdesc
);
2436 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2437 call based on a formal parameter which is described by jump function JFUNC
2438 and if it can be determined, make it direct and return the direct edge.
2439 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2442 static struct cgraph_edge
*
2443 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2444 struct ipa_jump_func
*jfunc
,
2445 struct ipa_node_params
*new_root_info
)
2449 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2454 if (TREE_CODE (binfo
) != TREE_BINFO
)
2456 binfo
= gimple_extract_devirt_binfo_from_cst
2457 (binfo
, ie
->indirect_info
->otr_type
);
2462 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2463 ie
->indirect_info
->otr_type
);
2465 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2471 return ipa_make_edge_direct_to_target (ie
, target
);
2476 /* Update the param called notes associated with NODE when CS is being inlined,
2477 assuming NODE is (potentially indirectly) inlined into CS->callee.
2478 Moreover, if the callee is discovered to be constant, create a new cgraph
2479 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2480 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2483 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2484 struct cgraph_node
*node
,
2485 vec
<cgraph_edge_p
> *new_edges
)
2487 struct ipa_edge_args
*top
;
2488 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2489 struct ipa_node_params
*new_root_info
;
2492 ipa_check_create_edge_args ();
2493 top
= IPA_EDGE_REF (cs
);
2494 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2495 ? cs
->caller
->global
.inlined_to
2498 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2500 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2501 struct ipa_jump_func
*jfunc
;
2504 next_ie
= ie
->next_callee
;
2506 if (ici
->param_index
== -1)
2509 /* We must check range due to calls with variable number of arguments: */
2510 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2512 ici
->param_index
= -1;
2516 param_index
= ici
->param_index
;
2517 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2519 if (!flag_indirect_inlining
)
2520 new_direct_edge
= NULL
;
2521 else if (ici
->polymorphic
)
2522 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2525 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2527 /* If speculation was removed, then we need to do nothing. */
2528 if (new_direct_edge
&& new_direct_edge
!= ie
)
2530 new_direct_edge
->indirect_inlining_edge
= 1;
2531 top
= IPA_EDGE_REF (cs
);
2534 else if (new_direct_edge
)
2536 new_direct_edge
->indirect_inlining_edge
= 1;
2537 if (new_direct_edge
->call_stmt
)
2538 new_direct_edge
->call_stmt_cannot_inline_p
2539 = !gimple_check_call_matching_types (
2540 new_direct_edge
->call_stmt
,
2541 new_direct_edge
->callee
->symbol
.decl
, false);
2544 new_edges
->safe_push (new_direct_edge
);
2547 top
= IPA_EDGE_REF (cs
);
2549 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2550 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2552 if (ici
->agg_contents
2553 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2554 ici
->param_index
= -1;
2556 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2558 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2560 if (ici
->agg_contents
2561 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2562 ici
->param_index
= -1;
2565 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2566 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2570 /* Either we can find a destination for this edge now or never. */
2571 ici
->param_index
= -1;
2577 /* Recursively traverse subtree of NODE (including node) made of inlined
2578 cgraph_edges when CS has been inlined and invoke
2579 update_indirect_edges_after_inlining on all nodes and
2580 update_jump_functions_after_inlining on all non-inlined edges that lead out
2581 of this subtree. Newly discovered indirect edges will be added to
2582 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2586 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2587 struct cgraph_node
*node
,
2588 vec
<cgraph_edge_p
> *new_edges
)
2590 struct cgraph_edge
*e
;
2593 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2595 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2596 if (!e
->inline_failed
)
2597 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2599 update_jump_functions_after_inlining (cs
, e
);
2600 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2601 update_jump_functions_after_inlining (cs
, e
);
2606 /* Combine two controlled uses counts as done during inlining. */
2609 combine_controlled_uses_counters (int c
, int d
)
2611 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2612 return IPA_UNDESCRIBED_USE
;
2617 /* Propagate number of controlled users from CS->caleee to the new root of the
2618 tree of inlined nodes. */
2621 propagate_controlled_uses (struct cgraph_edge
*cs
)
2623 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2624 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2625 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2626 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2627 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2630 count
= MIN (ipa_get_cs_argument_count (args
),
2631 ipa_get_param_count (old_root_info
));
2632 for (i
= 0; i
< count
; i
++)
2634 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2635 struct ipa_cst_ref_desc
*rdesc
;
2637 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2640 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2641 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2642 d
= ipa_get_controlled_uses (old_root_info
, i
);
2644 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2645 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2646 c
= combine_controlled_uses_counters (c
, d
);
2647 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2648 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2650 struct cgraph_node
*n
;
2651 struct ipa_ref
*ref
;
2652 tree t
= new_root_info
->known_vals
[src_idx
];
2654 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2655 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2656 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2657 && (ref
= ipa_find_reference ((symtab_node
) new_root
,
2658 (symtab_node
) n
, NULL
, 0)))
2661 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2662 "reference from %s/%i to %s/%i.\n",
2663 xstrdup (cgraph_node_name (new_root
)),
2664 new_root
->symbol
.order
,
2665 xstrdup (cgraph_node_name (n
)), n
->symbol
.order
);
2666 ipa_remove_reference (ref
);
2670 else if (jf
->type
== IPA_JF_CONST
2671 && (rdesc
= jfunc_rdesc_usable (jf
)))
2673 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2674 int c
= rdesc
->refcount
;
2675 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2676 if (rdesc
->refcount
== 0)
2678 tree cst
= ipa_get_jf_constant (jf
);
2679 struct cgraph_node
*n
;
2680 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2681 && TREE_CODE (TREE_OPERAND (cst
, 0))
2683 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
2686 struct cgraph_node
*clone
;
2687 remove_described_reference ((symtab_node
) n
, rdesc
);
2690 while (clone
->global
.inlined_to
2691 && clone
!= rdesc
->cs
->caller
2692 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
2694 struct ipa_ref
*ref
;
2695 ref
= ipa_find_reference ((symtab_node
) clone
,
2696 (symtab_node
) n
, NULL
, 0);
2700 fprintf (dump_file
, "ipa-prop: Removing "
2701 "cloning-created reference "
2702 "from %s/%i to %s/%i.\n",
2703 xstrdup (cgraph_node_name (clone
)),
2704 clone
->symbol
.order
,
2705 xstrdup (cgraph_node_name (n
)),
2707 ipa_remove_reference (ref
);
2709 clone
= clone
->callers
->caller
;
2716 for (i
= ipa_get_param_count (old_root_info
);
2717 i
< ipa_get_cs_argument_count (args
);
2720 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2722 if (jf
->type
== IPA_JF_CONST
)
2724 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
2726 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
2728 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
2729 ipa_set_controlled_uses (new_root_info
,
2730 jf
->value
.pass_through
.formal_id
,
2731 IPA_UNDESCRIBED_USE
);
2735 /* Update jump functions and call note functions on inlining the call site CS.
2736 CS is expected to lead to a node already cloned by
2737 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2738 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2742 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
2743 vec
<cgraph_edge_p
> *new_edges
)
2746 /* Do nothing if the preparation phase has not been carried out yet
2747 (i.e. during early inlining). */
2748 if (!ipa_node_params_vector
.exists ())
2750 gcc_assert (ipa_edge_args_vector
);
2752 propagate_controlled_uses (cs
);
2753 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
2758 /* Frees all dynamically allocated structures that the argument info points
2762 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
2764 vec_free (args
->jump_functions
);
2765 memset (args
, 0, sizeof (*args
));
2768 /* Free all ipa_edge structures. */
2771 ipa_free_all_edge_args (void)
2774 struct ipa_edge_args
*args
;
2776 if (!ipa_edge_args_vector
)
2779 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
2780 ipa_free_edge_args_substructures (args
);
2782 vec_free (ipa_edge_args_vector
);
2785 /* Frees all dynamically allocated structures that the param info points
2789 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
2791 info
->descriptors
.release ();
2792 free (info
->lattices
);
2793 /* Lattice values and their sources are deallocated with their alocation
2795 info
->known_vals
.release ();
2796 memset (info
, 0, sizeof (*info
));
2799 /* Free all ipa_node_params structures. */
2802 ipa_free_all_node_params (void)
2805 struct ipa_node_params
*info
;
2807 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
2808 ipa_free_node_params_substructures (info
);
2810 ipa_node_params_vector
.release ();
2813 /* Set the aggregate replacements of NODE to be AGGVALS. */
2816 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
2817 struct ipa_agg_replacement_value
*aggvals
)
2819 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
2820 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
2822 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
2825 /* Hook that is called by cgraph.c when an edge is removed. */
2828 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
2830 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2831 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
2833 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
2836 /* Hook that is called by cgraph.c when a node is removed. */
2839 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
2841 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2842 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
2843 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
2844 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
2845 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
2848 /* Hook that is called by cgraph.c when an edge is duplicated. */
2851 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
2852 __attribute__((unused
)) void *data
)
2854 struct ipa_edge_args
*old_args
, *new_args
;
2857 ipa_check_create_edge_args ();
2859 old_args
= IPA_EDGE_REF (src
);
2860 new_args
= IPA_EDGE_REF (dst
);
2862 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
2864 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
2866 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
2867 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
2869 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
2871 if (src_jf
->type
== IPA_JF_CONST
)
2873 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
2876 dst_jf
->value
.constant
.rdesc
= NULL
;
2877 else if (src_rdesc
->cs
== src
)
2879 struct ipa_cst_ref_desc
*dst_rdesc
;
2880 gcc_checking_assert (ipa_refdesc_pool
);
2882 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
2883 dst_rdesc
->cs
= dst
;
2884 dst_rdesc
->refcount
= src_rdesc
->refcount
;
2885 if (dst
->caller
->global
.inlined_to
)
2887 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
2888 src_rdesc
->next_duplicate
= dst_rdesc
;
2890 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
2894 struct ipa_cst_ref_desc
*dst_rdesc
;
2895 /* This can happen during inlining, when a JFUNC can refer to a
2896 reference taken in a function up in the tree of inline clones.
2897 We need to find the duplicate that refers to our tree of
2900 gcc_assert (dst
->caller
->global
.inlined_to
);
2901 for (dst_rdesc
= src_rdesc
->next_duplicate
;
2903 dst_rdesc
= dst_rdesc
->next_duplicate
)
2904 if (dst_rdesc
->cs
->caller
->global
.inlined_to
2905 == dst
->caller
->global
.inlined_to
)
2907 gcc_assert (dst_rdesc
);
2908 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
2914 /* Hook that is called by cgraph.c when a node is duplicated. */
2917 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
2918 ATTRIBUTE_UNUSED
void *data
)
2920 struct ipa_node_params
*old_info
, *new_info
;
2921 struct ipa_agg_replacement_value
*old_av
, *new_av
;
2923 ipa_check_create_node_params ();
2924 old_info
= IPA_NODE_REF (src
);
2925 new_info
= IPA_NODE_REF (dst
);
2927 new_info
->descriptors
= old_info
->descriptors
.copy ();
2928 new_info
->lattices
= NULL
;
2929 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
2931 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
2932 new_info
->node_enqueued
= old_info
->node_enqueued
;
2934 old_av
= ipa_get_agg_replacements_for_node (src
);
2941 struct ipa_agg_replacement_value
*v
;
2943 v
= ggc_alloc_ipa_agg_replacement_value ();
2944 memcpy (v
, old_av
, sizeof (*v
));
2947 old_av
= old_av
->next
;
2949 ipa_set_node_agg_value_chain (dst
, new_av
);
2953 /* Analyze newly added function into callgraph. */
2956 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
2958 ipa_analyze_node (node
);
2961 /* Register our cgraph hooks if they are not already there. */
2964 ipa_register_cgraph_hooks (void)
2966 if (!edge_removal_hook_holder
)
2967 edge_removal_hook_holder
=
2968 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
2969 if (!node_removal_hook_holder
)
2970 node_removal_hook_holder
=
2971 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
2972 if (!edge_duplication_hook_holder
)
2973 edge_duplication_hook_holder
=
2974 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
2975 if (!node_duplication_hook_holder
)
2976 node_duplication_hook_holder
=
2977 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
2978 function_insertion_hook_holder
=
2979 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
2982 /* Unregister our cgraph hooks if they are not already there. */
2985 ipa_unregister_cgraph_hooks (void)
2987 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
2988 edge_removal_hook_holder
= NULL
;
2989 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
2990 node_removal_hook_holder
= NULL
;
2991 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
2992 edge_duplication_hook_holder
= NULL
;
2993 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
2994 node_duplication_hook_holder
= NULL
;
2995 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
2996 function_insertion_hook_holder
= NULL
;
2999 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3000 longer needed after ipa-cp. */
3003 ipa_free_all_structures_after_ipa_cp (void)
3007 ipa_free_all_edge_args ();
3008 ipa_free_all_node_params ();
3009 free_alloc_pool (ipcp_sources_pool
);
3010 free_alloc_pool (ipcp_values_pool
);
3011 free_alloc_pool (ipcp_agg_lattice_pool
);
3012 ipa_unregister_cgraph_hooks ();
3013 if (ipa_refdesc_pool
)
3014 free_alloc_pool (ipa_refdesc_pool
);
3018 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3019 longer needed after indirect inlining. */
3022 ipa_free_all_structures_after_iinln (void)
3024 ipa_free_all_edge_args ();
3025 ipa_free_all_node_params ();
3026 ipa_unregister_cgraph_hooks ();
3027 if (ipcp_sources_pool
)
3028 free_alloc_pool (ipcp_sources_pool
);
3029 if (ipcp_values_pool
)
3030 free_alloc_pool (ipcp_values_pool
);
3031 if (ipcp_agg_lattice_pool
)
3032 free_alloc_pool (ipcp_agg_lattice_pool
);
3033 if (ipa_refdesc_pool
)
3034 free_alloc_pool (ipa_refdesc_pool
);
3037 /* Print ipa_tree_map data structures of all functions in the
3041 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3045 struct ipa_node_params
*info
;
3047 if (!node
->symbol
.definition
)
3049 info
= IPA_NODE_REF (node
);
3050 fprintf (f
, " function %s/%i parameter descriptors:\n",
3051 cgraph_node_name (node
), node
->symbol
.order
);
3052 count
= ipa_get_param_count (info
);
3053 for (i
= 0; i
< count
; i
++)
3057 temp
= ipa_get_param (info
, i
);
3058 if (TREE_CODE (temp
) == PARM_DECL
)
3059 fprintf (f
, " param %d : %s", i
,
3061 ? (*lang_hooks
.decl_printable_name
) (temp
, 2)
3063 if (ipa_is_param_used (info
, i
))
3064 fprintf (f
, " used");
3065 c
= ipa_get_controlled_uses (info
, i
);
3066 if (c
== IPA_UNDESCRIBED_USE
)
3067 fprintf (f
, " undescribed_use");
3069 fprintf (f
, " controlled_uses=%i", c
);
3074 /* Print ipa_tree_map data structures of all functions in the
3078 ipa_print_all_params (FILE * f
)
3080 struct cgraph_node
*node
;
3082 fprintf (f
, "\nFunction parameters:\n");
3083 FOR_EACH_FUNCTION (node
)
3084 ipa_print_node_params (f
, node
);
3087 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3090 ipa_get_vector_of_formal_parms (tree fndecl
)
3096 gcc_assert (!flag_wpa
);
3097 count
= count_formal_params (fndecl
);
3098 args
.create (count
);
3099 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3100 args
.quick_push (parm
);
3105 /* Return a heap allocated vector containing types of formal parameters of
3106 function type FNTYPE. */
3108 static inline vec
<tree
>
3109 get_vector_of_formal_parm_types (tree fntype
)
3115 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3118 types
.create (count
);
3119 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3120 types
.quick_push (TREE_VALUE (t
));
3125 /* Modify the function declaration FNDECL and its type according to the plan in
3126 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3127 to reflect the actual parameters being modified which are determined by the
3128 base_index field. */
3131 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
,
3132 const char *synth_parm_prefix
)
3134 vec
<tree
> oparms
, otypes
;
3135 tree orig_type
, new_type
= NULL
;
3136 tree old_arg_types
, t
, new_arg_types
= NULL
;
3137 tree parm
, *link
= &DECL_ARGUMENTS (fndecl
);
3138 int i
, len
= adjustments
.length ();
3139 tree new_reversed
= NULL
;
3140 bool care_for_types
, last_parm_void
;
3142 if (!synth_parm_prefix
)
3143 synth_parm_prefix
= "SYNTH";
3145 oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3146 orig_type
= TREE_TYPE (fndecl
);
3147 old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3149 /* The following test is an ugly hack, some functions simply don't have any
3150 arguments in their type. This is probably a bug but well... */
3151 care_for_types
= (old_arg_types
!= NULL_TREE
);
3154 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3156 otypes
= get_vector_of_formal_parm_types (orig_type
);
3158 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3160 gcc_assert (oparms
.length () == otypes
.length ());
3164 last_parm_void
= false;
3168 for (i
= 0; i
< len
; i
++)
3170 struct ipa_parm_adjustment
*adj
;
3173 adj
= &adjustments
[i
];
3174 parm
= oparms
[adj
->base_index
];
3177 if (adj
->copy_param
)
3180 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3183 link
= &DECL_CHAIN (parm
);
3185 else if (!adj
->remove_param
)
3191 ptype
= build_pointer_type (adj
->type
);
3196 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3198 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3200 DECL_NAME (new_parm
) = create_tmp_var_name (synth_parm_prefix
);
3202 DECL_ARTIFICIAL (new_parm
) = 1;
3203 DECL_ARG_TYPE (new_parm
) = ptype
;
3204 DECL_CONTEXT (new_parm
) = fndecl
;
3205 TREE_USED (new_parm
) = 1;
3206 DECL_IGNORED_P (new_parm
) = 1;
3207 layout_decl (new_parm
, 0);
3210 adj
->reduction
= new_parm
;
3214 link
= &DECL_CHAIN (new_parm
);
3222 new_reversed
= nreverse (new_arg_types
);
3226 TREE_CHAIN (new_arg_types
) = void_list_node
;
3228 new_reversed
= void_list_node
;
3232 /* Use copy_node to preserve as much as possible from original type
3233 (debug info, attribute lists etc.)
3234 Exception is METHOD_TYPEs must have THIS argument.
3235 When we are asked to remove it, we need to build new FUNCTION_TYPE
3237 if (TREE_CODE (orig_type
) != METHOD_TYPE
3238 || (adjustments
[0].copy_param
3239 && adjustments
[0].base_index
== 0))
3241 new_type
= build_distinct_type_copy (orig_type
);
3242 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3247 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3249 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3250 DECL_VINDEX (fndecl
) = NULL_TREE
;
3253 /* When signature changes, we need to clear builtin info. */
3254 if (DECL_BUILT_IN (fndecl
))
3256 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3257 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3260 /* This is a new type, not a copy of an old type. Need to reassociate
3261 variants. We can handle everything except the main variant lazily. */
3262 t
= TYPE_MAIN_VARIANT (orig_type
);
3265 TYPE_MAIN_VARIANT (new_type
) = t
;
3266 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3267 TYPE_NEXT_VARIANT (t
) = new_type
;
3271 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3272 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3275 TREE_TYPE (fndecl
) = new_type
;
3276 DECL_VIRTUAL_P (fndecl
) = 0;
3281 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3282 If this is a directly recursive call, CS must be NULL. Otherwise it must
3283 contain the corresponding call graph edge. */
3286 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3287 ipa_parm_adjustment_vec adjustments
)
3289 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3291 vec
<tree
, va_gc
> **debug_args
= NULL
;
3293 gimple_stmt_iterator gsi
, prev_gsi
;
3297 len
= adjustments
.length ();
3299 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->symbol
.decl
;
3300 ipa_remove_stmt_references ((symtab_node
) current_node
, stmt
);
3302 gsi
= gsi_for_stmt (stmt
);
3304 gsi_prev (&prev_gsi
);
3305 for (i
= 0; i
< len
; i
++)
3307 struct ipa_parm_adjustment
*adj
;
3309 adj
= &adjustments
[i
];
3311 if (adj
->copy_param
)
3313 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3315 vargs
.quick_push (arg
);
3317 else if (!adj
->remove_param
)
3319 tree expr
, base
, off
;
3321 unsigned int deref_align
;
3322 bool deref_base
= false;
3324 /* We create a new parameter out of the value of the old one, we can
3325 do the following kind of transformations:
3327 - A scalar passed by reference is converted to a scalar passed by
3328 value. (adj->by_ref is false and the type of the original
3329 actual argument is a pointer to a scalar).
3331 - A part of an aggregate is passed instead of the whole aggregate.
3332 The part can be passed either by value or by reference, this is
3333 determined by value of adj->by_ref. Moreover, the code below
3334 handles both situations when the original aggregate is passed by
3335 value (its type is not a pointer) and when it is passed by
3336 reference (it is a pointer to an aggregate).
3338 When the new argument is passed by reference (adj->by_ref is true)
3339 it must be a part of an aggregate and therefore we form it by
3340 simply taking the address of a reference inside the original
3343 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3344 base
= gimple_call_arg (stmt
, adj
->base_index
);
3345 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3346 : EXPR_LOCATION (base
);
3348 if (TREE_CODE (base
) != ADDR_EXPR
3349 && POINTER_TYPE_P (TREE_TYPE (base
)))
3350 off
= build_int_cst (adj
->alias_ptr_type
,
3351 adj
->offset
/ BITS_PER_UNIT
);
3354 HOST_WIDE_INT base_offset
;
3358 if (TREE_CODE (base
) == ADDR_EXPR
)
3360 base
= TREE_OPERAND (base
, 0);
3366 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3367 /* Aggregate arguments can have non-invariant addresses. */
3370 base
= build_fold_addr_expr (prev_base
);
3371 off
= build_int_cst (adj
->alias_ptr_type
,
3372 adj
->offset
/ BITS_PER_UNIT
);
3374 else if (TREE_CODE (base
) == MEM_REF
)
3379 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3381 off
= build_int_cst (adj
->alias_ptr_type
,
3383 + adj
->offset
/ BITS_PER_UNIT
);
3384 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3386 base
= TREE_OPERAND (base
, 0);
3390 off
= build_int_cst (adj
->alias_ptr_type
,
3392 + adj
->offset
/ BITS_PER_UNIT
);
3393 base
= build_fold_addr_expr (base
);
3399 tree type
= adj
->type
;
3401 unsigned HOST_WIDE_INT misalign
;
3405 align
= deref_align
;
3410 get_pointer_alignment_1 (base
, &align
, &misalign
);
3411 if (TYPE_ALIGN (type
) > align
)
3412 align
= TYPE_ALIGN (type
);
3414 misalign
+= (tree_to_double_int (off
)
3415 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3417 misalign
= misalign
& (align
- 1);
3419 align
= (misalign
& -misalign
);
3420 if (align
< TYPE_ALIGN (type
))
3421 type
= build_aligned_type (type
, align
);
3422 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3426 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3427 expr
= build_fold_addr_expr (expr
);
3430 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3432 || is_gimple_reg_type (adj
->type
),
3433 NULL
, true, GSI_SAME_STMT
);
3434 vargs
.quick_push (expr
);
3436 if (!adj
->copy_param
&& MAY_HAVE_DEBUG_STMTS
)
3439 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3442 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3443 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3445 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3447 arg
= fold_convert_loc (gimple_location (stmt
),
3448 TREE_TYPE (origin
), arg
);
3450 if (debug_args
== NULL
)
3451 debug_args
= decl_debug_args_insert (callee_decl
);
3452 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3453 if (ddecl
== origin
)
3455 ddecl
= (**debug_args
)[ix
+ 1];
3460 ddecl
= make_node (DEBUG_EXPR_DECL
);
3461 DECL_ARTIFICIAL (ddecl
) = 1;
3462 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3463 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3465 vec_safe_push (*debug_args
, origin
);
3466 vec_safe_push (*debug_args
, ddecl
);
3468 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3469 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3473 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3475 fprintf (dump_file
, "replacing stmt:");
3476 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3479 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3481 if (gimple_call_lhs (stmt
))
3482 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3484 gimple_set_block (new_stmt
, gimple_block (stmt
));
3485 if (gimple_has_location (stmt
))
3486 gimple_set_location (new_stmt
, gimple_location (stmt
));
3487 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3488 gimple_call_copy_flags (new_stmt
, stmt
);
3490 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3492 fprintf (dump_file
, "with stmt:");
3493 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3494 fprintf (dump_file
, "\n");
3496 gsi_replace (&gsi
, new_stmt
, true);
3498 cgraph_set_call_stmt (cs
, new_stmt
);
3501 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3504 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3505 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3507 update_ssa (TODO_update_ssa
);
3508 free_dominance_info (CDI_DOMINATORS
);
3511 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3514 index_in_adjustments_multiple_times_p (int base_index
,
3515 ipa_parm_adjustment_vec adjustments
)
3517 int i
, len
= adjustments
.length ();
3520 for (i
= 0; i
< len
; i
++)
3522 struct ipa_parm_adjustment
*adj
;
3523 adj
= &adjustments
[i
];
3525 if (adj
->base_index
== base_index
)
3537 /* Return adjustments that should have the same effect on function parameters
3538 and call arguments as if they were first changed according to adjustments in
3539 INNER and then by adjustments in OUTER. */
3541 ipa_parm_adjustment_vec
3542 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
3543 ipa_parm_adjustment_vec outer
)
3545 int i
, outlen
= outer
.length ();
3546 int inlen
= inner
.length ();
3548 ipa_parm_adjustment_vec adjustments
, tmp
;
3551 for (i
= 0; i
< inlen
; i
++)
3553 struct ipa_parm_adjustment
*n
;
3556 if (n
->remove_param
)
3559 tmp
.quick_push (*n
);
3562 adjustments
.create (outlen
+ removals
);
3563 for (i
= 0; i
< outlen
; i
++)
3565 struct ipa_parm_adjustment r
;
3566 struct ipa_parm_adjustment
*out
= &outer
[i
];
3567 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
3569 memset (&r
, 0, sizeof (r
));
3570 gcc_assert (!in
->remove_param
);
3571 if (out
->remove_param
)
3573 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
3575 r
.remove_param
= true;
3576 adjustments
.quick_push (r
);
3581 r
.base_index
= in
->base_index
;
3584 /* FIXME: Create nonlocal value too. */
3586 if (in
->copy_param
&& out
->copy_param
)
3587 r
.copy_param
= true;
3588 else if (in
->copy_param
)
3589 r
.offset
= out
->offset
;
3590 else if (out
->copy_param
)
3591 r
.offset
= in
->offset
;
3593 r
.offset
= in
->offset
+ out
->offset
;
3594 adjustments
.quick_push (r
);
3597 for (i
= 0; i
< inlen
; i
++)
3599 struct ipa_parm_adjustment
*n
= &inner
[i
];
3601 if (n
->remove_param
)
3602 adjustments
.quick_push (*n
);
3609 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3610 friendly way, assuming they are meant to be applied to FNDECL. */
3613 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
3616 int i
, len
= adjustments
.length ();
3618 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
3620 fprintf (file
, "IPA param adjustments: ");
3621 for (i
= 0; i
< len
; i
++)
3623 struct ipa_parm_adjustment
*adj
;
3624 adj
= &adjustments
[i
];
3627 fprintf (file
, " ");
3631 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
3632 print_generic_expr (file
, parms
[adj
->base_index
], 0);
3635 fprintf (file
, ", base: ");
3636 print_generic_expr (file
, adj
->base
, 0);
3640 fprintf (file
, ", reduction: ");
3641 print_generic_expr (file
, adj
->reduction
, 0);
3643 if (adj
->new_ssa_base
)
3645 fprintf (file
, ", new_ssa_base: ");
3646 print_generic_expr (file
, adj
->new_ssa_base
, 0);
3649 if (adj
->copy_param
)
3650 fprintf (file
, ", copy_param");
3651 else if (adj
->remove_param
)
3652 fprintf (file
, ", remove_param");
3654 fprintf (file
, ", offset %li", (long) adj
->offset
);
3656 fprintf (file
, ", by_ref");
3657 print_node_brief (file
, ", type: ", adj
->type
, 0);
3658 fprintf (file
, "\n");
3663 /* Dump the AV linked list. */
3666 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
3669 fprintf (f
, " Aggregate replacements:");
3670 for (; av
; av
= av
->next
)
3672 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
3673 av
->index
, av
->offset
);
3674 print_generic_expr (f
, av
->value
, 0);
3680 /* Stream out jump function JUMP_FUNC to OB. */
3683 ipa_write_jump_function (struct output_block
*ob
,
3684 struct ipa_jump_func
*jump_func
)
3686 struct ipa_agg_jf_item
*item
;
3687 struct bitpack_d bp
;
3690 streamer_write_uhwi (ob
, jump_func
->type
);
3691 switch (jump_func
->type
)
3693 case IPA_JF_UNKNOWN
:
3695 case IPA_JF_KNOWN_TYPE
:
3696 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
3697 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
3698 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
3702 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
3703 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
3705 case IPA_JF_PASS_THROUGH
:
3706 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
3707 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
3709 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
3710 bp
= bitpack_create (ob
->main_stream
);
3711 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
3712 streamer_write_bitpack (&bp
);
3716 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
3717 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
3720 case IPA_JF_ANCESTOR
:
3721 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
3722 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
3723 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
3724 bp
= bitpack_create (ob
->main_stream
);
3725 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
3726 streamer_write_bitpack (&bp
);
3730 count
= vec_safe_length (jump_func
->agg
.items
);
3731 streamer_write_uhwi (ob
, count
);
3734 bp
= bitpack_create (ob
->main_stream
);
3735 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
3736 streamer_write_bitpack (&bp
);
3739 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
3741 streamer_write_uhwi (ob
, item
->offset
);
3742 stream_write_tree (ob
, item
->value
, true);
3746 /* Read in jump function JUMP_FUNC from IB. */
3749 ipa_read_jump_function (struct lto_input_block
*ib
,
3750 struct ipa_jump_func
*jump_func
,
3751 struct cgraph_edge
*cs
,
3752 struct data_in
*data_in
)
3754 enum jump_func_type jftype
;
3755 enum tree_code operation
;
3758 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
3761 case IPA_JF_UNKNOWN
:
3762 jump_func
->type
= IPA_JF_UNKNOWN
;
3764 case IPA_JF_KNOWN_TYPE
:
3766 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
3767 tree base_type
= stream_read_tree (ib
, data_in
);
3768 tree component_type
= stream_read_tree (ib
, data_in
);
3770 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
3774 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
3776 case IPA_JF_PASS_THROUGH
:
3777 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
3778 if (operation
== NOP_EXPR
)
3780 int formal_id
= streamer_read_uhwi (ib
);
3781 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3782 bool agg_preserved
= bp_unpack_value (&bp
, 1);
3783 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
3787 tree operand
= stream_read_tree (ib
, data_in
);
3788 int formal_id
= streamer_read_uhwi (ib
);
3789 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
3793 case IPA_JF_ANCESTOR
:
3795 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
3796 tree type
= stream_read_tree (ib
, data_in
);
3797 int formal_id
= streamer_read_uhwi (ib
);
3798 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3799 bool agg_preserved
= bp_unpack_value (&bp
, 1);
3801 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
);
3806 count
= streamer_read_uhwi (ib
);
3807 vec_alloc (jump_func
->agg
.items
, count
);
3810 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3811 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
3813 for (i
= 0; i
< count
; i
++)
3815 struct ipa_agg_jf_item item
;
3816 item
.offset
= streamer_read_uhwi (ib
);
3817 item
.value
= stream_read_tree (ib
, data_in
);
3818 jump_func
->agg
.items
->quick_push (item
);
3822 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3823 relevant to indirect inlining to OB. */
3826 ipa_write_indirect_edge_info (struct output_block
*ob
,
3827 struct cgraph_edge
*cs
)
3829 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
3830 struct bitpack_d bp
;
3832 streamer_write_hwi (ob
, ii
->param_index
);
3833 streamer_write_hwi (ob
, ii
->offset
);
3834 bp
= bitpack_create (ob
->main_stream
);
3835 bp_pack_value (&bp
, ii
->polymorphic
, 1);
3836 bp_pack_value (&bp
, ii
->agg_contents
, 1);
3837 bp_pack_value (&bp
, ii
->member_ptr
, 1);
3838 bp_pack_value (&bp
, ii
->by_ref
, 1);
3839 streamer_write_bitpack (&bp
);
3841 if (ii
->polymorphic
)
3843 streamer_write_hwi (ob
, ii
->otr_token
);
3844 stream_write_tree (ob
, ii
->otr_type
, true);
3848 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3849 relevant to indirect inlining from IB. */
3852 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
3853 struct data_in
*data_in ATTRIBUTE_UNUSED
,
3854 struct cgraph_edge
*cs
)
3856 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
3857 struct bitpack_d bp
;
3859 ii
->param_index
= (int) streamer_read_hwi (ib
);
3860 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
3861 bp
= streamer_read_bitpack (ib
);
3862 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
3863 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
3864 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
3865 ii
->by_ref
= bp_unpack_value (&bp
, 1);
3866 if (ii
->polymorphic
)
3868 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
3869 ii
->otr_type
= stream_read_tree (ib
, data_in
);
3873 /* Stream out NODE info to OB. */
3876 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
3879 lto_symtab_encoder_t encoder
;
3880 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
3882 struct cgraph_edge
*e
;
3883 struct bitpack_d bp
;
3885 encoder
= ob
->decl_state
->symtab_node_encoder
;
3886 node_ref
= lto_symtab_encoder_encode (encoder
, (symtab_node
) node
);
3887 streamer_write_uhwi (ob
, node_ref
);
3889 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
3890 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3891 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
3892 bp
= bitpack_create (ob
->main_stream
);
3893 gcc_assert (info
->uses_analysis_done
3894 || ipa_get_param_count (info
) == 0);
3895 gcc_assert (!info
->node_enqueued
);
3896 gcc_assert (!info
->ipcp_orig_node
);
3897 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3898 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
3899 streamer_write_bitpack (&bp
);
3900 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3901 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
3902 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3904 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3906 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
3907 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
3908 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
3910 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3912 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3914 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
3915 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
3916 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
3917 ipa_write_indirect_edge_info (ob
, e
);
3921 /* Stream in NODE info from IB. */
3924 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
3925 struct data_in
*data_in
)
3927 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
3929 struct cgraph_edge
*e
;
3930 struct bitpack_d bp
;
3932 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
3934 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
3935 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
3937 bp
= streamer_read_bitpack (ib
);
3938 if (ipa_get_param_count (info
) != 0)
3939 info
->uses_analysis_done
= true;
3940 info
->node_enqueued
= false;
3941 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
3942 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
3943 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
3944 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
3945 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3947 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3948 int count
= streamer_read_uhwi (ib
);
3952 vec_safe_grow_cleared (args
->jump_functions
, count
);
3954 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
3955 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
3958 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3960 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3961 int count
= streamer_read_uhwi (ib
);
3965 vec_safe_grow_cleared (args
->jump_functions
, count
);
3966 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
3967 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
3970 ipa_read_indirect_edge_info (ib
, data_in
, e
);
3974 /* Write jump functions for nodes in SET. */
3977 ipa_prop_write_jump_functions (void)
3979 struct cgraph_node
*node
;
3980 struct output_block
*ob
;
3981 unsigned int count
= 0;
3982 lto_symtab_encoder_iterator lsei
;
3983 lto_symtab_encoder_t encoder
;
3986 if (!ipa_node_params_vector
.exists ())
3989 ob
= create_output_block (LTO_section_jump_functions
);
3990 encoder
= ob
->decl_state
->symtab_node_encoder
;
3991 ob
->cgraph_node
= NULL
;
3992 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
3993 lsei_next_function_in_partition (&lsei
))
3995 node
= lsei_cgraph_node (lsei
);
3996 if (cgraph_function_with_gimple_body_p (node
)
3997 && IPA_NODE_REF (node
) != NULL
)
4001 streamer_write_uhwi (ob
, count
);
4003 /* Process all of the functions. */
4004 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4005 lsei_next_function_in_partition (&lsei
))
4007 node
= lsei_cgraph_node (lsei
);
4008 if (cgraph_function_with_gimple_body_p (node
)
4009 && IPA_NODE_REF (node
) != NULL
)
4010 ipa_write_node_info (ob
, node
);
4012 streamer_write_char_stream (ob
->main_stream
, 0);
4013 produce_asm (ob
, NULL
);
4014 destroy_output_block (ob
);
4017 /* Read section in file FILE_DATA of length LEN with data DATA. */
4020 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4023 const struct lto_function_header
*header
=
4024 (const struct lto_function_header
*) data
;
4025 const int cfg_offset
= sizeof (struct lto_function_header
);
4026 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4027 const int string_offset
= main_offset
+ header
->main_size
;
4028 struct data_in
*data_in
;
4029 struct lto_input_block ib_main
;
4033 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4037 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4038 header
->string_size
, vNULL
);
4039 count
= streamer_read_uhwi (&ib_main
);
4041 for (i
= 0; i
< count
; i
++)
4044 struct cgraph_node
*node
;
4045 lto_symtab_encoder_t encoder
;
4047 index
= streamer_read_uhwi (&ib_main
);
4048 encoder
= file_data
->symtab_node_encoder
;
4049 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4050 gcc_assert (node
->symbol
.definition
);
4051 ipa_read_node_info (&ib_main
, node
, data_in
);
4053 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4055 lto_data_in_delete (data_in
);
4058 /* Read ipcp jump functions. */
4061 ipa_prop_read_jump_functions (void)
4063 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4064 struct lto_file_decl_data
*file_data
;
4067 ipa_check_create_node_params ();
4068 ipa_check_create_edge_args ();
4069 ipa_register_cgraph_hooks ();
4071 while ((file_data
= file_data_vec
[j
++]))
4074 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4077 ipa_prop_read_section (file_data
, data
, len
);
4081 /* After merging units, we can get mismatch in argument counts.
4082 Also decl merging might've rendered parameter lists obsolete.
4083 Also compute called_with_variable_arg info. */
4086 ipa_update_after_lto_read (void)
4088 ipa_check_create_node_params ();
4089 ipa_check_create_edge_args ();
4093 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4096 unsigned int count
= 0;
4097 lto_symtab_encoder_t encoder
;
4098 struct ipa_agg_replacement_value
*aggvals
, *av
;
4100 aggvals
= ipa_get_agg_replacements_for_node (node
);
4101 encoder
= ob
->decl_state
->symtab_node_encoder
;
4102 node_ref
= lto_symtab_encoder_encode (encoder
, (symtab_node
) node
);
4103 streamer_write_uhwi (ob
, node_ref
);
4105 for (av
= aggvals
; av
; av
= av
->next
)
4107 streamer_write_uhwi (ob
, count
);
4109 for (av
= aggvals
; av
; av
= av
->next
)
4111 struct bitpack_d bp
;
4113 streamer_write_uhwi (ob
, av
->offset
);
4114 streamer_write_uhwi (ob
, av
->index
);
4115 stream_write_tree (ob
, av
->value
, true);
4117 bp
= bitpack_create (ob
->main_stream
);
4118 bp_pack_value (&bp
, av
->by_ref
, 1);
4119 streamer_write_bitpack (&bp
);
4123 /* Stream in the aggregate value replacement chain for NODE from IB. */
4126 read_agg_replacement_chain (struct lto_input_block
*ib
,
4127 struct cgraph_node
*node
,
4128 struct data_in
*data_in
)
4130 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4131 unsigned int count
, i
;
4133 count
= streamer_read_uhwi (ib
);
4134 for (i
= 0; i
<count
; i
++)
4136 struct ipa_agg_replacement_value
*av
;
4137 struct bitpack_d bp
;
4139 av
= ggc_alloc_ipa_agg_replacement_value ();
4140 av
->offset
= streamer_read_uhwi (ib
);
4141 av
->index
= streamer_read_uhwi (ib
);
4142 av
->value
= stream_read_tree (ib
, data_in
);
4143 bp
= streamer_read_bitpack (ib
);
4144 av
->by_ref
= bp_unpack_value (&bp
, 1);
4148 ipa_set_node_agg_value_chain (node
, aggvals
);
4151 /* Write all aggregate replacement for nodes in set. */
4154 ipa_prop_write_all_agg_replacement (void)
4156 struct cgraph_node
*node
;
4157 struct output_block
*ob
;
4158 unsigned int count
= 0;
4159 lto_symtab_encoder_iterator lsei
;
4160 lto_symtab_encoder_t encoder
;
4162 if (!ipa_node_agg_replacements
)
4165 ob
= create_output_block (LTO_section_ipcp_transform
);
4166 encoder
= ob
->decl_state
->symtab_node_encoder
;
4167 ob
->cgraph_node
= NULL
;
4168 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4169 lsei_next_function_in_partition (&lsei
))
4171 node
= lsei_cgraph_node (lsei
);
4172 if (cgraph_function_with_gimple_body_p (node
)
4173 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4177 streamer_write_uhwi (ob
, count
);
4179 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4180 lsei_next_function_in_partition (&lsei
))
4182 node
= lsei_cgraph_node (lsei
);
4183 if (cgraph_function_with_gimple_body_p (node
)
4184 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4185 write_agg_replacement_chain (ob
, node
);
4187 streamer_write_char_stream (ob
->main_stream
, 0);
4188 produce_asm (ob
, NULL
);
4189 destroy_output_block (ob
);
4192 /* Read replacements section in file FILE_DATA of length LEN with data
4196 read_replacements_section (struct lto_file_decl_data
*file_data
,
4200 const struct lto_function_header
*header
=
4201 (const struct lto_function_header
*) data
;
4202 const int cfg_offset
= sizeof (struct lto_function_header
);
4203 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4204 const int string_offset
= main_offset
+ header
->main_size
;
4205 struct data_in
*data_in
;
4206 struct lto_input_block ib_main
;
4210 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4213 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4214 header
->string_size
, vNULL
);
4215 count
= streamer_read_uhwi (&ib_main
);
4217 for (i
= 0; i
< count
; i
++)
4220 struct cgraph_node
*node
;
4221 lto_symtab_encoder_t encoder
;
4223 index
= streamer_read_uhwi (&ib_main
);
4224 encoder
= file_data
->symtab_node_encoder
;
4225 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4226 gcc_assert (node
->symbol
.definition
);
4227 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4229 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4231 lto_data_in_delete (data_in
);
4234 /* Read IPA-CP aggregate replacements. */
4237 ipa_prop_read_all_agg_replacement (void)
4239 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4240 struct lto_file_decl_data
*file_data
;
4243 while ((file_data
= file_data_vec
[j
++]))
4246 const char *data
= lto_get_section_data (file_data
,
4247 LTO_section_ipcp_transform
,
4250 read_replacements_section (file_data
, data
, len
);
4254 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4258 adjust_agg_replacement_values (struct cgraph_node
*node
,
4259 struct ipa_agg_replacement_value
*aggval
)
4261 struct ipa_agg_replacement_value
*v
;
4262 int i
, c
= 0, d
= 0, *adj
;
4264 if (!node
->clone
.combined_args_to_skip
)
4267 for (v
= aggval
; v
; v
= v
->next
)
4269 gcc_assert (v
->index
>= 0);
4275 adj
= XALLOCAVEC (int, c
);
4276 for (i
= 0; i
< c
; i
++)
4277 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
4285 for (v
= aggval
; v
; v
= v
->next
)
4286 v
->index
= adj
[v
->index
];
4290 /* Function body transformation phase. */
4293 ipcp_transform_function (struct cgraph_node
*node
)
4295 vec
<ipa_param_descriptor_t
> descriptors
= vNULL
;
4296 struct param_analysis_info
*parms_ainfo
;
4297 struct ipa_agg_replacement_value
*aggval
;
4298 gimple_stmt_iterator gsi
;
4301 bool cfg_changed
= false, something_changed
= false;
4303 gcc_checking_assert (cfun
);
4304 gcc_checking_assert (current_function_decl
);
4307 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4308 cgraph_node_name (node
), node
->symbol
.order
);
4310 aggval
= ipa_get_agg_replacements_for_node (node
);
4313 param_count
= count_formal_params (node
->symbol
.decl
);
4314 if (param_count
== 0)
4316 adjust_agg_replacement_values (node
, aggval
);
4318 ipa_dump_agg_replacement_values (dump_file
, aggval
);
4319 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4320 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4321 descriptors
.safe_grow_cleared (param_count
);
4322 ipa_populate_param_decls (node
, descriptors
);
4325 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4327 struct ipa_agg_replacement_value
*v
;
4328 gimple stmt
= gsi_stmt (gsi
);
4330 HOST_WIDE_INT offset
;
4334 if (!gimple_assign_load_p (stmt
))
4336 rhs
= gimple_assign_rhs1 (stmt
);
4337 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
4342 while (handled_component_p (t
))
4344 /* V_C_E can do things like convert an array of integers to one
4345 bigger integer and similar things we do not handle below. */
4346 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4351 t
= TREE_OPERAND (t
, 0);
4356 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4357 rhs
, &index
, &offset
, &by_ref
))
4359 for (v
= aggval
; v
; v
= v
->next
)
4360 if (v
->index
== index
4361 && v
->offset
== offset
)
4363 if (!v
|| v
->by_ref
!= by_ref
)
4366 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
4367 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4369 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4370 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4371 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4372 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4373 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4378 fprintf (dump_file
, " const ");
4379 print_generic_expr (dump_file
, v
->value
, 0);
4380 fprintf (dump_file
, " can't be converted to type of ");
4381 print_generic_expr (dump_file
, rhs
, 0);
4382 fprintf (dump_file
, "\n");
4390 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4392 fprintf (dump_file
, "Modifying stmt:\n ");
4393 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4395 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4398 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4400 fprintf (dump_file
, "into:\n ");
4401 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4402 fprintf (dump_file
, "\n");
4405 something_changed
= true;
4406 if (maybe_clean_eh_stmt (stmt
)
4407 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
4411 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4412 free_parms_ainfo (parms_ainfo
, param_count
);
4413 descriptors
.release ();
4415 if (!something_changed
)
4417 else if (cfg_changed
)
4418 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4420 return TODO_update_ssa_only_virtuals
;