/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  struct cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl = node->decl;
  tree fnargs, parm;
  int param_num = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
        ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set JFUNC to be a copy of another jump function (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;

  type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    return NULL_TREE;

  if (!jfunc->value.known_type.offset)
    return base_binfo;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           ...
           D.1867_2 = p.1_1->f;
           ...
         }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

     foo (int a)
     {
       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       bar (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset,
                         type_p ? TREE_TYPE (param_type) : NULL, index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          struct ipa_polymorphic_call_context context (cs->caller->decl,
                                                       arg, cs->call_stmt,
                                                       &instance);
          /* TODO: We should also handle dynamic types.  */
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      /* If ARG is pointer, we can not use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get type from function declaration, but in case
         of K&R prototypes or variadic functions we can try our luck with type
         of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         may better work out the type based on the memory stores we find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto)
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       ...

       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 4>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }
}
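
/* A sketch of the CFG diamond the checks above look for (editorial
   illustration only):

	     bb        <- loads the pfn and tests its least significant bit
	    /  \
	   |   virt_bb <- performs the vtable lookup for virtual members
	    \  /
	    join       <- PHI of the two possible targets, then the call

   Only when VIRT_BB has BB as its single predecessor and JOIN as its single
   successor is the member pointer parameter recorded as called.  */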
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gimple call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
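
/* For illustration (not from the original sources): in a caller like

     void caller (struct A *a)
     {
       a->foo ();
     }

   where foo is virtual, the call target is an OBJ_TYPE_REF whose object is
   the default definition of parameter 0, so the code above creates an
   indirect call graph edge with param_index 0, polymorphic set, and
   otr_token/otr_type taken from the OBJ_TYPE_REF, provided no dynamic type
   change of *a could have happened before the call.  */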
/* Analyze whether and how the call statement CALL utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;
  if (cs->indirect_info->polymorphic)
    {
      tree target = gimple_call_fn (call);
      tree instance;
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      if (context.get_dynamic_type (instance,
				    OBJ_TYPE_REF_OBJECT (target),
				    obj_type_ref_class (target), call))
	cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, stmt);
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
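
/* Editorial example of the counting above: for

     static void foo (void (*fn) (void))
     {
       fn ();
       fn ();
     }

   parameter FN ends up with controlled_uses == 2, because every use is a
   call.  If FN were also stored to memory or compared, the count would
   become IPA_UNDESCRIBED_USE instead.  */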
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
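
/* Editorial note, not from the original sources: ipa_analyze_node first
   distributes the outgoing call graph edges of NODE into the per-BB
   ipa_bb_info vectors, e.g. an edge whose call statement sits in basic
   block 3 lands in fbi.bb_infos[3].cg_edges, and only then walks the
   dominator tree so that each block's edges are processed exactly once.  */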
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  if (!ipa_get_jf_ancestor_type_preserved (dst))
    {
      dst->type = IPA_JF_UNKNOWN;
      return;
    }

  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  ipa_set_jf_known_type (dst, combined_offset,
			 ipa_get_jf_known_type_base_type (src),
			 combined_type);
}
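
/* Editorial worked example: if SRC says the argument is a known object of
   some type at offset 32 and DST is an ancestor jump function that adds
   offset 64, the combination above produces a known-type jump function at
   offset 32 + 64 = 96, typed by the ancestor's type, provided the ancestor
   preserves the type.  */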
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     do not.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!dst->value.ancestor.agg_preserved)
		ctx.possible_dynamic_type_change ();
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  vec_safe_grow_cleared (args->polymorphic_call_contexts,
					 count);
		  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.pass_through.type_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.ancestor.type_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!dst->value.ancestor.agg_preserved)
		    ctx.possible_dynamic_type_change ();
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_KNOWN_TYPE:
		  if (ipa_get_jf_pass_through_type_preserved (dst))
		    ipa_set_jf_known_type (dst,
					   ipa_get_jf_known_type_offset (src),
					   ipa_get_jf_known_type_base_type (src),
					   ipa_get_jf_known_type_component_type (src));
		  else
		    dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p, type_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			type_p = ipa_get_jf_pass_through_type_preserved (src)
			  && ipa_get_jf_pass_through_type_preserved (dst);
			ipa_set_jf_simple_pass_through (dst, formal_id,
							agg_p, type_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p, type_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    type_p = ipa_get_jf_ancestor_type_preserved (src)
		      && ipa_get_jf_pass_through_type_preserved (dst);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_type (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p, type_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We had better make sure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it.  Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    ie = ie->make_direct (callee);
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast <cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is none.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
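
/* Editorial illustration: if AGG is a by-reference aggregate whose items
   are { offset 0 -> &foo, offset 32 -> 7 }, then a query with OFFSET 32
   and BY_REF true returns 7, whereas OFFSET 16, or any query with BY_REF
   false, returns NULL.  */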
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup (origin->caller->name ()),
	     origin->caller->order, xstrdup (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
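
/* A hedged example of the simple case above: if the caller does

     process (&callback, data);

   and PROCESS calls its first parameter, then during inlining or IPA-CP the
   jump function for that parameter is the constant &callback, TARGET
   resolves to the FUNCTION_DECL of callback, and the indirect edge is
   turned into a direct one by ipa_make_edge_direct_to_target.  */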
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info,
				   struct ipa_polymorphic_call_context *ctx_ptr)
{
  tree binfo, target = NULL;
  bool speculative = false;
  bool updated = false;

  if (!flag_devirtualize)
    return NULL;

  /* If this is a call of a function parameter, restrict its type
     based on knowledge of the context.  */
  if (ctx_ptr && !ie->indirect_info->by_ref)
    {
      struct ipa_polymorphic_call_context ctx = *ctx_ptr;

      ctx.offset_by (ie->indirect_info->offset);

      /* TODO: We want to record if a type change happens.
	 The old code did not do that, which seems like a bug.  */
      ctx.possible_dynamic_type_change (ie->indirect_info->otr_type);

      updated = ie->indirect_info->context.combine_with
		  (ctx, ie->indirect_info->otr_type);
    }

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->by_ref)
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						      vtable, offset);
	  if (target)
	    {
	      if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (target)))
		target = ipa_impossible_devirt_target (ie, target);
	      return ipa_make_edge_direct_to_target (ie, target);
	    }
	}
    }

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  if (binfo && TREE_CODE (binfo) != TREE_BINFO)
    {
      struct ipa_polymorphic_call_context ctx (binfo,
					       ie->indirect_info->otr_type,
					       ie->indirect_info->offset);
      updated |= ie->indirect_info->context.combine_with
		   (ctx, ie->indirect_info->otr_type);
    }

  if (updated)
    {
      ipa_polymorphic_call_context context (ie);
      vec <cgraph_node *>targets;
      bool final;

      targets = possible_polymorphic_call_targets
		  (ie->indirect_info->otr_type,
		   ie->indirect_info->otr_token,
		   context, &final);
      if (final && targets.length () <= 1)
	{
	  if (targets.length () == 1)
	    target = targets[0]->decl;
	  else
	    target = ipa_impossible_devirt_target (ie, NULL_TREE);
	}
      else if (flag_devirtualize_speculatively
	       && !ie->speculative && ie->maybe_hot_p ())
	{
	  cgraph_node *n
	    = try_speculative_devirtualization (ie->indirect_info->otr_type,
						ie->indirect_info->otr_token,
						ie->indirect_info->context);
	  if (n)
	    {
	      target = n->decl;
	      speculative = true;
	    }
	}
    }

  if (binfo && TREE_CODE (binfo) == TREE_BINFO)
    {
      binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
				   ie->indirect_info->otr_type);
      if (binfo)
	{
	  tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
						     binfo);
	  if (t)
	    {
	      gcc_assert (!target || speculative || target == t);
	      target = t;
	      speculative = false;
	    }
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
	target = ipa_impossible_devirt_target (ie, target);
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context *ctx;
	  ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							       new_root_info,
							       ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
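
/* Editorial examples of the rule above: combining counts 2 and 3 yields 5,
   while combining any count with IPA_UNDESCRIBED_USE stays
   IPA_UNDESCRIBED_USE, since a single undescribed use makes the total
   unknowable.  */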
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (new_root->name ()),
			     new_root->order,
			     xstrdup (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				      == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (clone->name ()),
				     clone->order,
				     xstrdup (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector.exists ())
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  memset (info, 0, sizeof (*info));
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  int i;
  struct ipa_node_params *info;

  FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
    ipa_free_node_params_substructures (info);

  ipa_node_params_vector.release ();
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  if (vec_safe_length (ipa_node_agg_replacements)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements,
			   symtab->cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when a node is removed.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc<ipa_agg_replacement_value> ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
/* Analyze a function newly added to the callgraph.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  free_alloc_pool (ipcp_sources_pool);
  free_alloc_pool (ipcp_values_pool);
  free_alloc_pool (ipcp_agg_lattice_pool);
  ipa_unregister_cgraph_hooks ();
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print the parameter descriptors of NODE to file F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes = vNULL;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
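
/* An illustrative adjustment plan (hypothetical, for exposition only):
   turning

     void foo (int a, struct big *b);

   into

     void foo (int a, int b_field);

   would use two entries: an IPA_PARM_OP_COPY with base_index 0 for A, and
   a non-copy adjustment with base_index 1, an appropriate offset and type,
   and by_ref false, for which the loop above builds the new "SYNTH"
   PARM_DECL.  */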
4094 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4095 If this is a directly recursive call, CS must be NULL. Otherwise it must
4096 contain the corresponding call graph edge. */
4099 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
4100 ipa_parm_adjustment_vec adjustments
)
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
        {
          tree arg = gimple_call_arg (stmt, adj->base_index);

          vargs.quick_push (arg);
        }
      else if (adj->op != IPA_PARM_OP_REMOVE)
        {
          tree expr, base, off;
          location_t loc;
          unsigned int deref_align = 0;
          bool deref_base = false;

          /* We create a new parameter out of the value of the old one, we can
             do the following kinds of transformations:

             - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

             - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by the value of adj->by_ref.  Moreover, the code
               below handles both situations when the original aggregate is
               passed by value (its type is not a pointer) and when it is
               passed by reference (it is a pointer to an aggregate).

             When the new argument is passed by reference (adj->by_ref is true)
             it must be a part of an aggregate and therefore we form it by
             simply taking the address of a reference inside the original
             aggregate.  */
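
          /* Hypothetical instances of the two transformations: a call
             foo (&i), where foo only reads *i, becomes foo (i_val) (scalar
             passed by reference turned into a scalar passed by value); and
             a call bar (&s), where bar only reads s->f, becomes
             bar (s_f_val), or bar (&s->f) when adj->by_ref is set.  */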

          gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
          base = gimple_call_arg (stmt, adj->base_index);
          loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
                              : EXPR_LOCATION (base);

          if (TREE_CODE (base) != ADDR_EXPR
              && POINTER_TYPE_P (TREE_TYPE (base)))
            off = build_int_cst (adj->alias_ptr_type,
                                 adj->offset / BITS_PER_UNIT);
          else
            {
              HOST_WIDE_INT base_offset;
              tree prev_base;
              bool addrof;

              if (TREE_CODE (base) == ADDR_EXPR)
                {
                  base = TREE_OPERAND (base, 0);
                  addrof = true;
                }
              else
                addrof = false;
              prev_base = base;
              base = get_addr_base_and_unit_offset (base, &base_offset);
              /* Aggregate arguments can have non-invariant addresses.  */
              if (!base)
                {
                  base = build_fold_addr_expr (prev_base);
                  off = build_int_cst (adj->alias_ptr_type,
                                       adj->offset / BITS_PER_UNIT);
                }
              else if (TREE_CODE (base) == MEM_REF)
                {
                  if (!addrof)
                    {
                      deref_base = true;
                      deref_align = TYPE_ALIGN (TREE_TYPE (base));
                    }
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
                                         off);
                  base = TREE_OPERAND (base, 0);
                }
              else
                {
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  base = build_fold_addr_expr (base);
                }
            }

          if (!adj->by_ref)
            {
              tree type = adj->type;
              unsigned int align;
              unsigned HOST_WIDE_INT misalign;

              if (deref_base)
                {
                  align = deref_align;
                  misalign = 0;
                }
              else
                {
                  get_pointer_alignment_1 (base, &align, &misalign);
                  if (TYPE_ALIGN (type) > align)
                    align = TYPE_ALIGN (type);
                }
              misalign += (offset_int::from (off, SIGNED).to_short_addr ()
                           * BITS_PER_UNIT);
              misalign = misalign & (align - 1);
              if (misalign != 0)
                align = (misalign & -misalign);
              if (align < TYPE_ALIGN (type))
                type = build_aligned_type (type, align);
              base = force_gimple_operand_gsi (&gsi, base,
                                               true, NULL, true,
                                               GSI_SAME_STMT);
              expr = fold_build2_loc (loc, MEM_REF, type, base, off);
              /* If expr is not a valid gimple call argument emit
                 a load into a temporary.  */
              if (is_gimple_reg_type (TREE_TYPE (expr)))
                {
                  gimple tem = gimple_build_assign (NULL_TREE, expr);
                  if (gimple_in_ssa_p (cfun))
                    {
                      gimple_set_vuse (tem, gimple_vuse (stmt));
                      expr = make_ssa_name (TREE_TYPE (expr), tem);
                    }
                  else
                    expr = create_tmp_reg (TREE_TYPE (expr), NULL);
                  gimple_assign_set_lhs (tem, expr);
                  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
                }
            }
          else
            {
              expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
              expr = build_fold_addr_expr (expr);
              expr = force_gimple_operand_gsi (&gsi, expr,
                                               true, NULL, true,
                                               GSI_SAME_STMT);
            }
          vargs.quick_push (expr);
        }
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
        {
          unsigned int ix;
          tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
          gimple def_temp;

          arg = gimple_call_arg (stmt, adj->base_index);
          if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
            {
              if (!fold_convertible_p (TREE_TYPE (origin), arg))
                continue;
              arg = fold_convert_loc (gimple_location (stmt),
                                      TREE_TYPE (origin), arg);
            }
          if (debug_args == NULL)
            debug_args = decl_debug_args_insert (callee_decl);
          for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
            if (ddecl == origin)
              {
                ddecl = (**debug_args)[ix + 1];
                break;
              }
          if (ddecl == NULL)
            {
              ddecl = make_node (DEBUG_EXPR_DECL);
              DECL_ARTIFICIAL (ddecl) = 1;
              TREE_TYPE (ddecl) = TREE_TYPE (origin);
              DECL_MODE (ddecl) = DECL_MODE (origin);

              vec_safe_push (*debug_args, origin);
              vec_safe_push (*debug_args, ddecl);
            }
          def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
        {
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
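
/* A rough sketch of the overall effect, under the hypothetical adjustments
   from the earlier example: the call

     x_1 = foo (&i, j_3);

   is replaced by something like

     SYNTH.4_5 = MEM[(int *)&i];
     x_1 = foo (SYNTH.4_5);

   with virtual operands, location, block and call flags carried over from
   the original statement as done just above.  */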

/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
                 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
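
/* For example (hypothetically), if an adjustment reduced parameter S to the
   scalar s->f passed by value in the new parameter ISRA.3, a use of s->f in
   the body is rewritten to ISRA.3; had the reduction been by reference, it
   would become *ISRA.3 via build_simple_mem_ref above, possibly wrapped in
   a VIEW_CONVERT_EXPR when the types do not match.  */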

/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
                              ipa_parm_adjustment_vec adjustments,
                              bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
        *convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
          && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
        {
          cand = adj;
          break;
        }
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
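
/* For instance, for a hypothetical access REALPART_EXPR <*p_1(D)>, the code
   above first steps down to *p_1(D) (setting *CONVERT), folds the MEM_REF
   offset into OFFSET, resolves the base to the PARM_DECL of p via
   get_ssa_base_param, and then returns the matching adjustment, provided it
   is neither a plain copy nor a removal.  */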

/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
                                       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
        {
          if (one)
            return true;
          else
            one = true;
        }
    }
  return false;
}

/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
                         ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        removals++;
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (n->op != IPA_PARM_OP_NEW);
          tmp.quick_push (*n);
        }
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
        {
          if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
            {
              r.op = IPA_PARM_OP_REMOVE;
              adjustments.quick_push (r);
            }
          continue;
        }
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (out->op != IPA_PARM_OP_NEW);
        }

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
        r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
        r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
        r.offset = in->offset;
      else
        r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
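
/* A hypothetical worked example: if INNER is { copy of param 0,
   remove param 1 } and OUTER is { remove param 0 }, then TMP holds just the
   copy of param 0, the OUTER removal turns into a removal of original
   index 0, and the removal of original param 1 is re-appended by the final
   loop, yielding { remove 0, remove 1 }.  */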

/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
                            tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
        fprintf (file, "                 ");
      else
        first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
        {
          fprintf (file, ", base: ");
          print_generic_expr (file, adj->base, 0);
        }
      if (adj->new_decl)
        {
          fprintf (file, ", new_decl: ");
          print_generic_expr (file, adj->new_decl, 0);
        }
      if (adj->new_ssa_base)
        {
          fprintf (file, ", new_ssa_base: ");
          print_generic_expr (file, adj->new_ssa_base, 0);
        }

      if (adj->op == IPA_PARM_OP_COPY)
        fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
        fprintf (file, ", remove_param");
      else
        fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
        fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
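
/* The resulting line looks like, e.g. (hypothetical values):

        Aggregate replacements: 0[32]=4, 1[0]=&global

   i.e. parameter-index[bit-offset]=value pairs.  */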

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
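
/* Schematically, the on-stream encoding produced above is therefore (field
   widths as written by the streamer, not a normative format):

     uhwi type
     <per-type payload, e.g. for a NOP_EXPR pass-through:
        uhwi NOP_EXPR, uhwi formal_id, bitpack (agg_preserved,
        type_preserved)>
     uhwi item-count [bitpack (by_ref)] { uhwi offset, tree value }*

   ipa_read_jump_function below must read fields in exactly this order.  */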

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree base_type = stream_read_tree (ib, data_in);
        tree component_type = stream_read_tree (ib, data_in);

        ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
        break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          bool type_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
                                          type_preserved);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree type = stream_read_tree (ib, data_in);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        bool type_preserved = bp_unpack_value (&bp, 1);

        ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
                             type_preserved);
        break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
      ipa_write_indirect_edge_info (ob, e);
    }
}
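
/* Schematically, each node record written above consists of the encoder
   reference, the parameter count and per-parameter move costs, a bitpack of
   the used flags, per-parameter controlled-use counts, and for every
   outgoing edge 2 * argument-count (plus one when polymorphic call contexts
   are present) followed by the jump functions themselves; indirect edges
   additionally carry their cgraph_indirect_call_info.  The reader below
   mirrors this order exactly.  */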

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
        continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
        vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        {
          ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                  data_in);
          if (contexts_computed)
            ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
                                                                      data_in);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
          if (contexts_computed)
            vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            {
              ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                      data_in);
              if (contexts_computed)
                ipa_get_ith_polymorhic_call_context (args, k)
                  ->stream_in (ib, data_in);
            }
        }
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_jump_functions,
                                               NULL, &len);
      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}

/* Stream out the aggregate value replacement chain for NODE to OB.  */

static void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
                            struct cgraph_node *node,
                            struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}
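
/* Note that pushing each AV onto the head of the chain above leaves the
   in-memory list in reverse stream order; e.g. a hypothetical stream
   (0[0]=1), (1[32]=2) is rebuilt as 1[32]=2 -> 0[0]=1.  This is harmless
   because consumers search the whole chain by index and offset.  */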

/* Write all aggregate replacements for nodes in set.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_get_agg_replacements_for_node (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_get_agg_replacements_for_node (node) != NULL)
        write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_ipcp_transform,
                                               NULL, &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
                               struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
        c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
        adj[i] = -1;
        d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
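
/* A hypothetical example: with four original parameters and
   combined_args_to_skip = {1}, the mapping computed above is
   adj = {0, -1, 1, 2}, so a replacement for original parameter 2 is
   retargeted to index 1 in the clone.  */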

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
                         vec<ipa_param_descriptor> descs,
                         struct ipa_agg_replacement_value *av,
                         bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
                                     &offset, &size, &by_ref))
        continue;
      for (v = m_aggval; v; v = v->next)
        if (v->index == index
            && v->offset == offset)
          break;
      if (!v
          || v->by_ref != by_ref
          || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v->value))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v->value)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v->value, 0);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs, 0);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        *m_cfg_changed = true;
    }
}
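
/* For instance (hypothetically), with an aggregate replacement 0[32]=4 for
   parameter p, the load

     _5 = p_1(D)->f;

   is rewritten above via gimple_assign_set_rhs_from_tree into

     _5 = 4;

   possibly followed by purging EH edges that the removed load made dead.  */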

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}