/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
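
/* Illustrative sketch (not from the original sources; names are made up):
   for a parameter P of type "struct S *", the flags track different memory.
   Given

     void foo (struct S *p)
     {
       p = &some_s;     <- a store to the parameter itself would set
                           parm_modified
       p->x = 1;        <- a store through the parameter would set
                           ref_modified
     }

   the distinction lets the analysis keep answering queries about *P even
   after P itself was clobbered, and vice versa.  */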
/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge_p> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
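
/* Illustrative note (not from the original sources): such per-function
   optimization settings typically come from source code like

     __attribute__ ((optimize ("O0"))) int f (int x) { return x; }

   for which the analysis must be skipped because the function is either not
   optimized at all or has IPA-CP disabled.  */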
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
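
/* Illustrative note (not from the original sources): in an IPA dump the
   printer above produces roughly one line per actual argument, e.g.

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 4

   the exact layout depends on the jump function kind.  */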
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc),
                                              32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
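
/* Illustrative sketch (types and names below are made up, not from the
   original sources): an ancestor jump function typically describes a cast
   to a base class being passed on, e.g.

     struct A { ... };  struct B : public A { ... };
     void g (A *a);
     void f (B *b) { g (b); }   <- the argument of g is "b" plus the offset
                                   of the A sub-object within B

   OFFSET is that sub-object offset in bits and FORMAL_ID the index of the
   parameter holding B.  */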
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    return NULL_TREE;

  if (!jfunc->value.known_type.offset)
    return base_binfo;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
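
/* Illustrative sketch (not from the original sources): the VMT pointer
   stores this predicate looks for appear in constructors roughly as

     this_1(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   i.e. an assignment to a COMPONENT_REF whose FIELD_DECL has
   DECL_VIRTUAL_P set.  */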
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
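
/* Illustrative sketch (not from the original sources): the dangerous case
   this predicate guards against is an inlined destructor or constructor
   changing the dynamic type in place, e.g.

     void f (A *a)
     {
       a->~A ();        <- inlined dtor stores A's VMT pointer
       new (a) B;       <- placement new stores B's VMT pointer
       a->virt ();
     }

   in which case the type observed at the call may differ from the type the
   object had at function entry.  */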
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
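
/* Illustrative note (not from the original sources): the walk budget comes
   from a --param knob, e.g.

     gcc -O2 --param ipa-max-aa-steps=25000 ...

   so the parm/ref/pt queries below stop consulting the alias oracle once
   the whole function has used up its share of alias-analysis steps.  */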
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describe the parameters of the current function and FBI holds
   intermediate function-body information (it can be NULL), STMT is the load
   statement.  If function returns true, *INDEX_P, *OFFSET_P and *BY_REF is
   filled with the parameter index, offset within the aggregate and whether it
   is a load from a value passed by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           ...
           D.1867_2 = p.1_1->f;
           ...
         }
      */
      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.

   2) The passed value can be described by a simple arithmetic pass-through
   jump function, e.g.

     D.2064_4 = a.1(D) + 4;

   This case can also occur in combination with the previous one, e.g.:

     D.2064_4 = a.0_3 + 4;

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       ...
       D.1845_2 = &this_1(D)->D.1748;
       ...
     }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like a simple assignment of the form

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     ...
     iftmp.1_3 = &obj_2(D)->D.1762;
     ...
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}
1509 determine if it is possible to construct a KNOWN_TYPE jump function for it
1510 and if so, create one and store it to JFUNC.
1511 EXPECTED_TYPE represents a type the argument should be in */
1514 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1515 gimple call
, tree expected_type
)
1517 HOST_WIDE_INT offset
, size
, max_size
;
1520 if (!flag_devirtualize
1521 || TREE_CODE (op
) != ADDR_EXPR
1522 || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op
)))
1523 /* Be sure expected_type is polymorphic. */
1525 || !contains_polymorphic_type_p (expected_type
))
1528 op
= TREE_OPERAND (op
, 0);
1529 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1533 || !contains_polymorphic_type_p (TREE_TYPE (base
)))
1536 if (decl_maybe_in_construction_p (base
, TREE_TYPE (base
),
1537 call
, current_function_decl
)
1538 /* Even if the var seems to be in construction by inline call stack,
1539 we may work out the actual type by walking memory writes. */
1540 && (!is_global_var (base
)
1541 && detect_type_change (op
, base
, expected_type
, call
, jfunc
, offset
)))
1544 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
),
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
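
/* Illustrative sketch (not from the original sources): the layout being
   matched corresponds to the common C++ ABI representation of a pointer to
   member function, roughly

     struct {
       void (T::*__pfn) ();   <- pointer to method (or vtable offset + 1)
       ptrdiff_t __delta;     <- adjustment to add to "this"
     };

   hence the two-field RECORD_TYPE test above.  */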
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
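
/* Illustrative sketch (not from the original sources): for stores at
   offsets 0, 32 and 64 bits the resulting list is kept sorted and
   non-overlapping,

     list -> {offset 0, size 32} -> {offset 32, size 32} -> {offset 64, ...}

   so a store straddling two entries (e.g. offset 16, size 32) makes the
   whole walk give up by returning NULL here.  */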
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
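
/* Illustrative sketch (not from the original sources): the pattern this
   walk recognizes is a caller initializing an aggregate just before the
   call, e.g.

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   which yields an aggregate jump function recording the constants 1 and 2
   at their respective offsets within S.  */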
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p, type_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  if (param_type && POINTER_TYPE_P (param_type))
                    type_p = !detect_type_change_ssa (arg,
                                                      TREE_TYPE (param_type),
                                                      call, jfunc);
                  else
                    type_p = false;
                  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
                    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
                                                    type_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call, stmt, param_type);
            }
        }
      else
        compute_known_type_jump_func (arg, jfunc, call,
                                      param_type
                                      && POINTER_TYPE_P (param_type)
                                      ? TREE_TYPE (param_type)
                                      : NULL);

      /* If ARG is a pointer, we can not use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get type from function declaration, but in case
         of K&R prototypes or variadic functions we can try our luck with type
         of the pointer passed.
         TODO: Since we look for actual initialization of the memory object,
         we may better work out the type based on the memory stores we find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          cgraph_function_or_thunk_node (callee, NULL);
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto)
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
2047 ipa_is_ssa_with_stmt_def (tree t
)
2049 if (TREE_CODE (t
) == SSA_NAME
2050 && !SSA_NAME_IS_DEFAULT_DEF (t
))
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
  struct cgraph_edge *cs;

  cs = cgraph_edge (node, stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);

      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      if (cs->indirect_info->offset != offset)
	cs->indirect_info->outer_type = NULL;
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gimple call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
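
/* Illustrative note (editor's sketch, not part of the original sources):
   a call qualifying for the note created above is a virtual call through a
   formal parameter, as in the C++ fragment

     int f (A *a)
     {
       return a->foo (4);   // lowered to an OBJ_TYPE_REF call on a_1
     }

   The recorded indirect edge then carries the parameter index of a, the
   ancestor offset and the OBJ_TYPE_REF token and type that are consulted
   later when attempting devirtualization.  */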
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
  if (cs && !cs->indirect_unknown_callee)
    return;
  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, stmt);
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
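
/* Illustrative note (editor's sketch, not part of the original sources):
   given

     static void g (void (*fn) (void)) { global_fn = fn; }
     static void h (void (*fn) (void)) { helper (fn); helper (fn); }

   parameter FN of h gets controlled_uses == 2, because every use of its
   default definition appears in a call statement, whereas the store in g is
   a non-call use and forces IPA_UNDESCRIBED_USE.  */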
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
   attempt a type-based devirtualization.  If successful, return the
   target function declaration, otherwise return NULL.  */

tree
ipa_intraprocedural_devirtualization (gimple call)
{
  tree binfo, token, fndecl;
  struct ipa_jump_func jfunc;
  tree otr = gimple_call_fn (call);

  jfunc.type = IPA_JF_UNKNOWN;
  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
				call, obj_type_ref_class (otr));
  if (jfunc.type != IPA_JF_KNOWN_TYPE)
    return NULL_TREE;
  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
  if (!binfo)
    return NULL_TREE;
  token = OBJ_TYPE_REF_TOKEN (otr);
  fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
					     binfo);
#ifdef ENABLE_CHECKING
  if (fndecl)
    gcc_assert (possible_polymorphic_call_target_p
		  (otr, cgraph_get_node (fndecl)));
#endif
  return fndecl;
}
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  if (!ipa_get_jf_ancestor_type_preserved (dst))
    {
      dst->type = IPA_JF_UNKNOWN;
      return;
    }

  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  ipa_set_jf_known_type (dst, combined_offset,
			 ipa_get_jf_known_type_base_type (src),
			 combined_type);
}
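
/* Worked example (editor's sketch, not part of the original sources): if SRC
   says "known type with base B at offset 32" and DST is an ancestor jump
   function with offset 64 that preserves the type, the combination is a
   known-type jump function at offset 32 + 64 = 96, keeping SRC's base type
   and taking DST's ancestor type as the component type.  */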
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      dst->type = IPA_JF_UNKNOWN;
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_KNOWN_TYPE)
	    combine_known_type_and_ancestor_jfs (src, dst);
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.pass_through.type_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.type_preserved &=
		src->value.ancestor.type_preserved;
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);

	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_KNOWN_TYPE:
		  if (ipa_get_jf_pass_through_type_preserved (dst))
		    ipa_set_jf_known_type (dst,
					   ipa_get_jf_known_type_offset (src),
					   ipa_get_jf_known_type_base_type (src),
					   ipa_get_jf_known_type_component_type (src));
		  else
		    dst->type = IPA_JF_UNKNOWN;
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p, type_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			type_p = ipa_get_jf_pass_through_type_preserved (src)
			  && ipa_get_jf_pass_through_type_preserved (dst);
			ipa_set_jf_simple_pass_through (dst, formal_id,
							agg_p, type_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p, type_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    type_p = ipa_get_jf_ancestor_type_preserved (src)
		      && ipa_get_jf_pass_through_type_preserved (dst);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_type (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p, type_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    dst->type = IPA_JF_UNKNOWN;
	}
    }
}
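
/* Worked example (editor's sketch, not part of the original sources):
   suppose edge E passes its caller's parameter 2 straight through (a
   NOP_EXPR pass-through with formal_id 2), and the inlined edge CS in turn
   passes the outer caller's parameter 0 as argument 2.  After inlining, the
   jump function on E becomes a simple pass-through with formal_id 0, and
   the agg/type preserved flags are the conjunction of the flags of the two
   composed functions.  */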
/* If TARGET is an addr_expr of a function declaration, make it the destination
   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_get_create_node (target);
	  unreachable = true;
	}
      else
	callee = cgraph_get_node (target);
    }
  else
    callee = cgraph_get_node (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We had better ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_get_create_node (target);
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       xstrdup (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  ie = cgraph_make_edge_direct (ie, callee);
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
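
/* Illustrative note (editor's sketch, not part of the original sources): if
   AGG describes a by-value aggregate with items {offset 0 -> 5, offset 64 ->
   &foo}, then ipa_find_agg_cst_for_param (agg, 64, false) yields &foo, while
   any other offset, or a BY_REF mismatch, yields NULL.  */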
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup (origin->caller->name ()),
	     origin->caller->order, xstrdup (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_get_node (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_get_create_node (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info)
{
  tree binfo, target;

  if (!flag_devirtualize)
    return NULL;

  /* First try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->by_ref)
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						      vtable, offset);
	  if (target)
	    {
	      if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_get_node (target)))
		target = ipa_impossible_devirt_target (ie, target);
	      return ipa_make_edge_direct_to_target (ie, target);
	    }
	}
    }

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  if (!binfo)
    return NULL;

  if (TREE_CODE (binfo) != TREE_BINFO)
    {
      ipa_polymorphic_call_context context;
      vec <cgraph_node *>targets;
      bool final;

      if (!get_polymorphic_call_info_from_invariant
	     (&context, binfo, ie->indirect_info->otr_type,
	      ie->indirect_info->offset))
	return NULL;
      targets = possible_polymorphic_call_targets
		 (ie->indirect_info->otr_type,
		  ie->indirect_info->otr_token,
		  context, &final);
      if (!final || targets.length () > 1)
	return NULL;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else
    {
      binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
				   ie->indirect_info->otr_type);
      if (binfo)
	target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
						   binfo);
      else
	return NULL;
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
	target = ipa_impossible_devirt_target (ie, target);
      return ipa_make_edge_direct_to_target (ie, target);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge_p> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							     new_root_info);
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      if (ipa_get_jf_ancestor_offset (jfunc))
		ici->outer_type = NULL;
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge_p> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
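
/* Worked example (editor's sketch, not part of the original sources):
   combine_controlled_uses_counters (2, 3) == 5, whereas either operand being
   IPA_UNDESCRIBED_USE makes the result IPA_UNDESCRIBED_USE, since an
   undescribed use in one function leaves the combined total unknown.  */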
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (new_root->name ()),
			     new_root->order,
			     xstrdup (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_get_node (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (clone->name ()),
				     clone->order,
				     xstrdup (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge_p> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_vector.exists ())
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
  info->descriptors.release ();
  free (info->lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  info->known_vals.release ();
  memset (info, 0, sizeof (*info));
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  int i;
  struct ipa_node_params *info;

  FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
    ipa_free_node_params_substructures (info);

  ipa_node_params_vector.release ();
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);

  (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when a node is removed.  */

static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (ipa_node_params_vector.length () > (unsigned)node->uid)
    ipa_free_node_params_substructures (IPA_NODE_REF (node));
  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc<ipa_agg_replacement_value> ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (cgraph_function_with_gimple_body_p (node))
    ipa_analyze_node (node);
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks once they are no longer needed.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print the parameter descriptors of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
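
/* Illustrative note (editor's sketch, not part of the original sources): for
   a prototyped declaration such as int f (int, double), TYPE_ARG_TYPES is
   terminated by void, so the returned vector is {int, double, void} - one
   element longer than the vector of PARM_DECLs, which is what the assertions
   in ipa_modify_formal_parameters below rely on.  */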
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  tree t = TYPE_MAIN_VARIANT (orig_type);
  if (orig_type != t)
    {
      TYPE_MAIN_VARIANT (new_type) = t;
      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new_type;
    }
  else
    {
      TYPE_MAIN_VARIANT (new_type) = new_type;
      TYPE_NEXT_VARIANT (new_type) = NULL;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
3985 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3986 If this is a directly recursive call, CS must be NULL. Otherwise it must
3987 contain the corresponding call graph edge. */
3990 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3991 ipa_parm_adjustment_vec adjustments
)
3993 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3995 vec
<tree
, va_gc
> **debug_args
= NULL
;
3997 gimple_stmt_iterator gsi
, prev_gsi
;
4001 len
= adjustments
.length ();
4003 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
4004 current_node
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
        {
          tree arg = gimple_call_arg (stmt, adj->base_index);

          vargs.quick_push (arg);
        }
      else if (adj->op != IPA_PARM_OP_REMOVE)
        {
          tree expr, base, off;
          location_t loc;
          unsigned int deref_align = 0;
          bool deref_base = false;

          /* We create a new parameter out of the value of the old one; we can
             do the following kinds of transformations:

             - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar.)

             - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference; this is
               determined by the value of adj->by_ref.  Moreover, the code
               below handles both situations when the original aggregate is
               passed by value (its type is not a pointer) and when it is
               passed by reference (it is a pointer to an aggregate).

             When the new argument is passed by reference (adj->by_ref is true)
             it must be a part of an aggregate and therefore we form it by
             simply taking the address of a reference inside the original
             aggregate.  */

          gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
          base = gimple_call_arg (stmt, adj->base_index);
          loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
                              : EXPR_LOCATION (base);

          if (TREE_CODE (base) != ADDR_EXPR
              && POINTER_TYPE_P (TREE_TYPE (base)))
            off = build_int_cst (adj->alias_ptr_type,
                                 adj->offset / BITS_PER_UNIT);
          else
            {
              HOST_WIDE_INT base_offset;
              tree prev_base;
              bool addrof;

              if (TREE_CODE (base) == ADDR_EXPR)
                {
                  base = TREE_OPERAND (base, 0);
                  addrof = true;
                }
              else
                addrof = false;
              prev_base = base;
              base = get_addr_base_and_unit_offset (base, &base_offset);
              /* Aggregate arguments can have non-invariant addresses.  */
              if (!base)
                {
                  base = build_fold_addr_expr (prev_base);
                  off = build_int_cst (adj->alias_ptr_type,
                                       adj->offset / BITS_PER_UNIT);
                }
              else if (TREE_CODE (base) == MEM_REF)
                {
                  if (!addrof)
                    {
                      deref_base = true;
                      deref_align = TYPE_ALIGN (TREE_TYPE (base));
                    }
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
                                         off);
                  base = TREE_OPERAND (base, 0);
                }
              else
                {
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  base = build_fold_addr_expr (base);
                }
            }

          if (!adj->by_ref)
            {
              tree type = adj->type;
              unsigned int align;
              unsigned HOST_WIDE_INT misalign;

              if (deref_base)
                {
                  align = deref_align;
                  misalign = 0;
                }
              else
                {
                  get_pointer_alignment_1 (base, &align, &misalign);
                  if (TYPE_ALIGN (type) > align)
                    align = TYPE_ALIGN (type);
                }
              misalign += (offset_int::from (off, SIGNED).to_short_addr ()
                           * BITS_PER_UNIT);
              misalign = misalign & (align - 1);
              if (misalign != 0)
                align = (misalign & -misalign);
              if (align < TYPE_ALIGN (type))
                type = build_aligned_type (type, align);
              base = force_gimple_operand_gsi (&gsi, base,
                                               true, NULL, true,
                                               GSI_SAME_STMT);
              expr = fold_build2_loc (loc, MEM_REF, type, base, off);
              /* If expr is not a valid gimple call argument emit
                 a load into a temporary.  */
              if (is_gimple_reg_type (TREE_TYPE (expr)))
                {
                  gimple tem = gimple_build_assign (NULL_TREE, expr);
                  if (gimple_in_ssa_p (cfun))
                    {
                      gimple_set_vuse (tem, gimple_vuse (stmt));
                      expr = make_ssa_name (TREE_TYPE (expr), tem);
                    }
                  else
                    expr = create_tmp_reg (TREE_TYPE (expr), NULL);
                  gimple_assign_set_lhs (tem, expr);
                  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
                }
            }
          else
            {
              expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
              expr = build_fold_addr_expr (expr);
              expr = force_gimple_operand_gsi (&gsi, expr,
                                               true, NULL, true,
                                               GSI_SAME_STMT);
            }
          vargs.quick_push (expr);
        }
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
        {
          unsigned int ix;
          tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
          gimple def_temp;

          arg = gimple_call_arg (stmt, adj->base_index);
          if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
            {
              if (!fold_convertible_p (TREE_TYPE (origin), arg))
                continue;
              arg = fold_convert_loc (gimple_location (stmt),
                                      TREE_TYPE (origin), arg);
            }
          if (debug_args == NULL)
            debug_args = decl_debug_args_insert (callee_decl);
          for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
            if (ddecl == origin)
              {
                ddecl = (**debug_args)[ix + 1];
                break;
              }
          if (ddecl == NULL)
            {
              ddecl = make_node (DEBUG_EXPR_DECL);
              DECL_ARTIFICIAL (ddecl) = 1;
              TREE_TYPE (ddecl) = TREE_TYPE (origin);
              DECL_MODE (ddecl) = DECL_MODE (origin);

              vec_safe_push (*debug_args, origin);
              vec_safe_push (*debug_args, ddecl);
            }
          def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
        {
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cgraph_set_call_stmt (cs, new_stmt);
  do
    {
      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
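/* For illustration only: with a hypothetical adjustment vector that keeps
   argument 0 (IPA_PARM_OP_COPY) and replaces argument 1 by the scalar stored
   four bytes into the aggregate it points to, the rewriting above would turn
   a call such as

       foo (a_1, p_2);

   into something like

       tmp_3 = MEM[(int *)p_2 + 4B];
       foo (a_1, tmp_3);

   where tmp_3 is the temporary emitted because the MEM_REF is not a valid
   register call argument, and the second statement is the call rebuilt by
   gimple_build_call_vec.  All names are made up for the example.  */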
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
                 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
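/* For illustration only: if a hypothetical parameter P was reduced to a new
   scalar parameter ISRA.0 standing for P->f (so cand->by_ref is false), an
   occurrence with *EXPR being p->f is rewritten above to ISRA.0 directly,
   and a VIEW_CONVERT_EXPR is wrapped around it only when CONVERT is set and
   the types do not match trivially.  */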
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
                              ipa_parm_adjustment_vec adjustments,
                              bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
        *convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
          && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
        {
          cand = adj;
          break;
        }
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
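/* For illustration only: given a hypothetical expression

       BIT_FIELD_REF <p->x, 8, 8>

   the lookup above first steps down to p->x (setting *CONVERT), lets
   get_ref_base_and_extent compute the base and the bit offset of x, and then
   searches ADJUSTMENTS for an entry with that base and offset, e.g. one that
   turned p->x into a separate scalar parameter.  */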
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
                                       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
        {
          if (one)
            return true;
          else
            one = true;
        }
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
                         ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        removals++;
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (n->op != IPA_PARM_OP_NEW);
          tmp.quick_push (*n);
        }
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
        {
          if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
            {
              r.op = IPA_PARM_OP_REMOVE;
              adjustments.quick_push (r);
            }
          continue;
        }
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (out->op != IPA_PARM_OP_NEW);
        }

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
        r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
        r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
        r.offset = in->offset;
      else
        r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
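/* For illustration only, a worked example of the composition above: if INNER
   rewrote (a, b, c) to (a, c) by removing b, and OUTER then removed its own
   parameter 0 from (a, c), the combined vector describes going from
   (a, b, c) straight to (c): an IPA_PARM_OP_REMOVE entry for a, a
   pass-through entry whose base_index is c's position in the original list,
   and the REMOVE entry for b appended from INNER by the final loop.  */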
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
                            tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
        fprintf (file, "                 ");
      else
        first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
        {
          fprintf (file, ", base: ");
          print_generic_expr (file, adj->base, 0);
        }
      if (adj->new_decl)
        {
          fprintf (file, ", new_decl: ");
          print_generic_expr (file, adj->new_decl, 0);
        }
      if (adj->new_ssa_base)
        {
          fprintf (file, ", new_ssa_base: ");
          print_generic_expr (file, adj->new_ssa_base, 0);
        }

      if (adj->op == IPA_PARM_OP_COPY)
        fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
        fprintf (file, ", remove_param");
      else
        fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
        fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
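/* For illustration only, the record emitted above for a simple pass-through
   jump function with no aggregate items is, in stream order:

       uhwi    IPA_JF_PASS_THROUGH
       uhwi    NOP_EXPR (the operation)
       uhwi    formal_id
       bitpack agg_preserved:1, type_preserved:1
       uhwi    0 (number of aggregate items)

   ipa_read_jump_function below consumes exactly the same sequence.  */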
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree base_type = stream_read_tree (ib, data_in);
        tree component_type = stream_read_tree (ib, data_in);

        ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
        break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          bool type_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
                                          type_preserved);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree type = stream_read_tree (ib, data_in);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        bool type_preserved = bp_unpack_value (&bp, 1);

        ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
                             type_preserved);
        break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->maybe_in_construction, 1);
  bp_pack_value (&bp, ii->maybe_derived_type, 1);
  streamer_write_bitpack (&bp);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      stream_write_tree (ob, ii->outer_type, true);
    }
}
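/* For illustration only: the bitpack written above carries six single-bit
   flags in the order polymorphic, agg_contents, member_ptr, by_ref,
   maybe_in_construction, maybe_derived_type.  ipa_read_indirect_edge_info
   below unpacks them in the same order, which is why the two functions are
   best modified in lockstep.  */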
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in ATTRIBUTE_UNUSED,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->maybe_in_construction = bp_unpack_value (&bp, 1);
  ii->maybe_derived_type = bp_unpack_value (&bp, 1);
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->outer_type = stream_read_tree (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}
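/* For illustration only, the per-node record produced above is laid out as:

       uhwi    node reference in the symtab encoder
       uhwi    parameter count
       uhwi    move cost, once per parameter
       bitpack one "used" bit per parameter
       hwi     controlled uses, once per parameter
       then, for each outgoing edge, the argument count followed by that many
       jump functions, with indirect edges additionally followed by their
       indirect-call info.

   ipa_read_node_info below mirrors this layout statement by statement.  */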
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
        continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                    data_in);
        }
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_jump_functions,
                                               NULL, &len);

      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Stream out the aggregate value replacement chain for NODE to OB.  */

static void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_agg_replacement_chain (struct lto_input_block *ib,
                            struct cgraph_node *node,
                            struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}
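/* For illustration only: because each element read above is pushed at the
   head of AGGVALS, a stored sequence such as 0[0]=1, 0[32]=2 is rebuilt as
   the list 0[32]=2 -> 0[0]=1, the reverse of the order in which
   write_agg_replacement_chain emitted it.  Lookups (e.g. in
   ipcp_modif_dom_walker::before_dom_children below) scan the chain linearly
   for a matching index and offset, so the order does not matter.  */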
/* Write all aggregate replacement for nodes in set.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && ipa_get_agg_replacements_for_node (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && ipa_get_agg_replacements_for_node (node) != NULL)
        write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->definition);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_ipcp_transform,
                                               NULL, &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
                               struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
        c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
        adj[i] = -1;
        d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
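/* For illustration only, a worked remapping for the function above: with
   combined_args_to_skip = {1} and replacements recorded for indices 0 and 2,
   the table is adj[0] = 0, adj[1] = -1, adj[2] = 1, so the replacement for
   original parameter 2 is re-pointed at parameter 1 of the clone, and
   nothing maps to the skipped slot.  */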
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
                         vec<ipa_param_descriptor> descs,
                         struct ipa_agg_replacement_value *av,
                         bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
                                     &offset, &size, &by_ref))
        continue;
      for (v = m_aggval; v; v = v->next)
        if (v->index == index
            && v->offset == offset)
          break;
      if (!v
          || v->by_ref != by_ref
          || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v->value))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v->value)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v->value, 0);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs, 0);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        *m_cfg_changed = true;
    }
}
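/* For illustration only: if the replacement chain of the current function
   contains the entry 0[32]=42 (parameter 0, bit offset 32), the walker above
   rewrites a dominated load such as

       x_5 = p_1(D)->f;

   where the load is recognized by ipa_load_from_parm_agg_1 as reading offset
   32 from the aggregate pointed to by parameter 0, into

       x_5 = 42;

   The names p_1, f and x_5 are hypothetical.  */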
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}