/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
110 /* Information related to a given BB that used only when looking at function
115 /* Call graph edges going out of this BB. */
116 vec
<cgraph_edge
*> cg_edges
;
117 /* Alias analysis statuses of each formal parameter at this bb. */
118 vec
<param_aa_status
> param_aa_statuses
;
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in the function which
   corresponds to DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
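
/* As a usage sketch (a hypothetical function, not from this file): for

     int f (int a, int b);

   the PARM_DECL of A maps to index 0, that of B to index 1, and any tree that
   is not a formal parameter of F yields -1.  */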
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	fprintf (f, "         Alignment: %u, misalignment: %u\n",
		 jump_func->alignment.align,
		 jump_func->alignment.misalign);
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function recording that nothing is known about the
   actual argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
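
/* A hypothetical caller illustrating which setter above applies to which
   kind of actual argument (all names below are made up for illustration):

     void caller (int x, struct B *b)
     {
       callee1 (x);	     <- simple pass-through of formal 0
       callee2 (x + 4);	     <- arithmetic pass-through, PLUS_EXPR, operand 4
       callee3 (&b->inner);  <- ancestor of formal 1 at the offset of INNER
       callee4 (7);	     <- constant jump function
     }  */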
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has changed, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
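
/* PARAM_IPA_MAX_AA_STEPS corresponds to the user-visible
   --param ipa-max-aa-steps knob, so the walking budget can be adjusted on
   the command line, e.g. --param ipa-max-aa-steps=50000, trading compile
   time for analysis precision.  */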
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
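
/* The effect of the dominator lookup above is that AA results propagate down
   the dominator tree: once a dominating BB has a valid status for a
   parameter (e.g. parm_modified set), every dominated BB starts from a copy
   of that status instead of re-walking the virtual definitions from
   scratch.  */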
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
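
/* As an illustration (hypothetical source, not from this file), in

     int f (struct S s) { return s.x; }

   the load of s.x would yield the parameter index of S, *OFFSET_P equal to
   the bit offset of field X and *BY_REF_P false; a load through a pointer
   parameter would instead set *BY_REF_P to true.  */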
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
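
/* For reference, the layout being matched is the usual representation of
   C++ pointers to member functions, e.g. roughly the following under the
   Itanium C++ ABI:

     struct
     {
       void (T::*__pfn) ();  <- the method pointer (or vtable index)
       ptrdiff_t __delta;    <- offset to adjust the object pointer by
     };  */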
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
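
/* For example, given a hypothetical copy chain

     tmp_2 = x_1(D);
     y_3 = tmp_2;

   get_ssa_def_if_simple_copy (y_3) follows the single-rhs assignments
   backwards and returns x_1(D).  */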
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
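
/* As a source-level illustration (hypothetical code, not from this file),
   for a call such as

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the second stage records two constant entries (at the offsets of A and B,
   with values 1 and 2) and the third stage turns them into an aggregate
   jump function with by_ref set, since S is passed by reference here.  */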
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2123 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2124 object referenced in the expression is a formal parameter of the caller
2125 FBI->node (described by FBI->info), create a call note for the
2129 ipa_analyze_virtual_call_uses (struct func_body_info
*fbi
,
2130 gcall
*call
, tree target
)
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;
  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;

      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  vec_safe_grow_cleared (args->polymorphic_call_contexts,
					 count);
		  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }
	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
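/* To see why this combination step is needed, consider a chain like the
   following (an illustrative sketch):

       void c (int x);
       void b (int y) { c (y); }	// B->C edge: pass-through jfunc for Y
       void a (void)  { b (7); }	// A->B edge: constant jfunc 7

   When B is inlined into A, the pass-through jump function on the edge to C
   must be composed with the jump function of the A->B edge, so the edge
   from the inlined body of B to C then describes the constant 7 directly.  */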
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }
  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) but the call is already "
		     "speculated to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) that agrees with previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);
  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

static tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);
  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }
  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *> targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);
  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				      == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}

/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
  ipcp_transformation_summary *src_trans
    = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_cst_values_pool);
      free_alloc_pool (ipcp_poly_ctx_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_cst_values_pool)
    free_alloc_pool (ipcp_cst_values_pool);
  if (ipcp_poly_ctx_values_pool)
    free_alloc_pool (ipcp_poly_ctx_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack; some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }
= adjustments
.length ();
3783 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3784 tree new_arg_types
= NULL
;
3785 for (int i
= 0; i
< len
; i
++)
3787 struct ipa_parm_adjustment
*adj
;
3790 adj
= &adjustments
[i
];
3792 if (adj
->op
== IPA_PARM_OP_NEW
)
3795 parm
= oparms
[adj
->base_index
];
3798 if (adj
->op
== IPA_PARM_OP_COPY
)
3801 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3804 link
= &DECL_CHAIN (parm
);
3806 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3812 ptype
= build_pointer_type (adj
->type
);
3816 if (is_gimple_reg_type (ptype
))
3818 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3819 if (TYPE_ALIGN (ptype
) < malign
)
3820 ptype
= build_aligned_type (ptype
, malign
);
3825 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3827 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3829 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3830 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3831 DECL_ARTIFICIAL (new_parm
) = 1;
3832 DECL_ARG_TYPE (new_parm
) = ptype
;
3833 DECL_CONTEXT (new_parm
) = fndecl
;
3834 TREE_USED (new_parm
) = 1;
3835 DECL_IGNORED_P (new_parm
) = 1;
3836 layout_decl (new_parm
, 0);
3838 if (adj
->op
== IPA_PARM_OP_NEW
)
3842 adj
->new_decl
= new_parm
;
3845 link
= &DECL_CHAIN (new_parm
);
3851 tree new_reversed
= NULL
;
3854 new_reversed
= nreverse (new_arg_types
);
3858 TREE_CHAIN (new_arg_types
) = void_list_node
;
3860 new_reversed
= void_list_node
;
3864 /* Use copy_node to preserve as much as possible from original type
3865 (debug info, attribute lists etc.)
3866 Exception is METHOD_TYPEs must have THIS argument.
3867 When we are asked to remove it, we need to build new FUNCTION_TYPE
3869 tree new_type
= NULL
;
3870 if (TREE_CODE (orig_type
) != METHOD_TYPE
3871 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3872 && adjustments
[0].base_index
== 0))
3874 new_type
= build_distinct_type_copy (orig_type
);
3875 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3880 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3882 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3883 DECL_VINDEX (fndecl
) = NULL_TREE
;
3886 /* When signature changes, we need to clear builtin info. */
3887 if (DECL_BUILT_IN (fndecl
))
3889 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3890 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3893 TREE_TYPE (fndecl
) = new_type
;
3894 DECL_VIRTUAL_P (fndecl
) = 0;
3895 DECL_LANG_SPECIFIC (fndecl
) = NULL
;
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);
  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }
	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function will
   leave incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
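/* Editorial illustration (not part of the original sources): if a parameter p
   of type "struct S { int a; int b; }" was split by IPA-SRA and *EXPR is the
   access "p.b", get_ref_base_and_extent yields base p with bit offset 32 on a
   target with 32-bit int, and the loop above selects the adjustment whose
   base is p and whose offset is 32, letting the caller redirect the access to
   that adjustment's replacement decl.  */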
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
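/* Editorial example (not in the original sources): suppose the original
   signature was (a, b, c), INNER removed b so the intermediate clone takes
   (a, c), and OUTER then removes the intermediate parameter 1, i.e. c.  TMP
   keeps the surviving inner entries for a and c; the loop over OUTER emits a
   copy for a and a removal for c, and the final loop re-appends INNER's
   removal of b, so applying the combined vector goes from (a, b, c) directly
   to (a).  */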
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a
   human-friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "    Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
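/* Editorial note: with the format string above, a chain recording constant 4
   at bit offset 32 of the aggregate in parameter 1 and constant 7 at offset 0
   of parameter 2 would print as:
     Aggregate replacements: 1[32]=4, 2[0]=7  */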
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
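/* Editorial recap of the record just emitted (derived from the code above):
   the jump function type; a type-specific payload (constant tree;
   pass-through operation with formal_id and either an agg_preserved bit or an
   extra operand tree; ancestor offset, formal_id and agg_preserved bit); the
   number of aggregate items, with a by_ref bit when nonzero, followed by the
   offset/value pairs; finally an alignment-known bit followed by align and
   misalign when set.  ipa_read_jump_function below must consume the fields in
   exactly this order.  */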
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
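/* Editorial note: the five flags are unpacked in precisely the order in which
   ipa_write_indirect_edge_info packed them; a bitpack carries no field tags,
   so reordering either side would silently shift every subsequent bit.  */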
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
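/* Editorial note on the encoding above: the per-edge count folds two values
   into one uhwi; the low bit records whether polymorphic call contexts follow
   and the remaining bits hold the argument count.  For example, three
   arguments with contexts are written as 3 * 2 + 1 = 7, and the reader
   recovers the parts with "count & 1" and "count /= 2".  */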
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Stream out the aggregate value replacement chain and the parameter
   alignments of NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
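/* Editorial recap of the record layout: a node reference; the number of
   aggregate replacement values, each streamed as offset, index, value tree
   and a by_ref bit; then either the number of parameter alignment entries,
   each a known bit plus align and misalign, or a single 0 when no alignment
   information exists.  read_ipcp_transformation_info below mirrors this
   order exactly.  */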
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
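/* Editorial example: with four original parameters and only bit 1 set in
   combined_args_to_skip, the mapping computed above is adj = { 0, -1, 1, 2 },
   so a replacement recorded for parameter 3 of the original function is
   re-indexed to parameter 2 of the clone.  */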
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      stmt = gsi_stmt (gsi);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}
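/* Editorial example of the rewrite performed above: if IPA-CP established
   that the aggregate passed through parameter p always contains 4 at the
   offset of field f, a load "x_1 = p_2(D)->f" whose index, offset and size
   match a replacement value is turned into "x_1 = 4", and EH edges that only
   the original load justified are purged afterwards.  */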
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
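/* Editorial example: if the summary records align 16 and misalign 4 for a
   pointer parameter, the SSA default def of that parameter receives
   set_ptr_info_alignment (pi, 16, 4), i.e. the pointer is known to be equal
   to 4 modulo 16, unless a prior alignment of 16 or more was already
   known.  */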
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}