1 /* Interprocedural analyses.
2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
51 #include "ipa-utils.h"
55 #include "tree-cfgcleanup.h"
57 /* Function summary where the parameter infos are actually stored.
   NULL until the summary is allocated -- NOTE(review): the allocation
   site is not visible in this chunk; confirm against the rest of the
   file.  */
58 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
/* Function summary holding per-node IPCP transformation data.  NULL until
   allocated (allocation site not visible in this chunk).  */
60 function_summary
<ipcp_transformation
*> *ipcp_transformation_sum
= NULL
;
62 /* Edge summary for IPA-CP edge information (per-argument jump
   functions, accessed via IPA_EDGE_REF elsewhere in this file).
   File scope, hence zero-initialized; allocated elsewhere.  */
63 ipa_edge_args_sum_t
*ipa_edge_args_sum
;
65 /* Traits for a hash table for reusing already existing ipa_bits. */
67 struct ipa_bit_ggc_hash_traits
: public ggc_cache_remove
<ipa_bits
*>
69 typedef ipa_bits
*value_type
;
70 typedef ipa_bits
*compare_type
;
72 hash (const ipa_bits
*p
)
74 hashval_t t
= (hashval_t
) p
->value
.to_shwi ();
75 return iterative_hash_host_wide_int (p
->mask
.to_shwi (), t
);
78 equal (const ipa_bits
*a
, const ipa_bits
*b
)
80 return a
->value
== b
->value
&& a
->mask
== b
->mask
;
83 mark_empty (ipa_bits
*&p
)
88 is_empty (const ipa_bits
*p
)
93 is_deleted (const ipa_bits
*p
)
95 return p
== reinterpret_cast<const ipa_bits
*> (1);
98 mark_deleted (ipa_bits
*&p
)
100 p
= reinterpret_cast<ipa_bits
*> (1);
104 /* Hash table used to avoid repeated allocations of equal ipa_bits.
   GTY ((cache)): entries are weakly held by the garbage collector and
   may be removed when no longer referenced (see the
   ggc_cache_remove-based ipa_bit_ggc_hash_traits above).  */
105 static GTY ((cache
)) hash_table
<ipa_bit_ggc_hash_traits
> *ipa_bits_hash_table
;
107 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
108 the equiv bitmap is not hashed and is expected to be NULL. */
110 struct ipa_vr_ggc_hash_traits
: public ggc_cache_remove
<value_range_base
*>
112 typedef value_range_base
*value_type
;
113 typedef value_range_base
*compare_type
;
115 hash (const value_range_base
*p
)
117 inchash::hash
hstate (p
->kind ());
118 inchash::add_expr (p
->min (), hstate
);
119 inchash::add_expr (p
->max (), hstate
);
120 return hstate
.end ();
123 equal (const value_range_base
*a
, const value_range_base
*b
)
125 return a
->equal_p (*b
);
128 mark_empty (value_range_base
*&p
)
133 is_empty (const value_range_base
*p
)
138 is_deleted (const value_range_base
*p
)
140 return p
== reinterpret_cast<const value_range_base
*> (1);
143 mark_deleted (value_range_base
*&p
)
145 p
= reinterpret_cast<value_range_base
*> (1);
149 /* Hash table used to avoid repeated allocations of equal value_ranges.
   GTY ((cache)): entries are weakly held by the garbage collector and
   may be removed when no longer referenced (see the
   ggc_cache_remove-based ipa_vr_ggc_hash_traits above).  */
150 static GTY ((cache
)) hash_table
<ipa_vr_ggc_hash_traits
> *ipa_vr_hash_table
;
152 /* Holder of the ipa cgraph function-insertion hook.  NOTE(review): the
   registration and unregistration of this hook are not visible in this
   chunk -- confirm against the rest of the file.  */
153 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
155 /* Description of a reference to an IPA constant. */
156 struct ipa_cst_ref_desc
158 /* Edge that corresponds to the statement which took the reference. */
159 struct cgraph_edge
*cs
;
160 /* Linked list of duplicates created when call graph edges are cloned. */
161 struct ipa_cst_ref_desc
*next_duplicate
;
162 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
163 if out of control. */
167 /* Allocation pool for ipa_cst_ref_desc reference descriptions
   (see ipa_set_jf_constant below, which allocates from this pool);
   constructed with the name "IPA-PROP ref descriptions".  */
169 static object_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
170 ("IPA-PROP ref descriptions");
172 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
173 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
176 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
178 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
182 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
185 /* Return index of the formal whose tree is PTREE in function which corresponds
189 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
194 count
= vec_safe_length (descriptors
);
195 for (i
= 0; i
< count
; i
++)
196 if ((*descriptors
)[i
].decl_or_type
== ptree
)
202 /* Return index of the formal whose tree is PTREE in function which corresponds
206 ipa_get_param_decl_index (class ipa_node_params
*info
, tree ptree
)
208 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
211 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
215 ipa_populate_param_decls (struct cgraph_node
*node
,
216 vec
<ipa_param_descriptor
, va_gc
> &descriptors
)
224 gcc_assert (gimple_has_body_p (fndecl
));
225 fnargs
= DECL_ARGUMENTS (fndecl
);
227 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
229 descriptors
[param_num
].decl_or_type
= parm
;
230 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
),
236 /* Return how many formal parameters FNDECL has. */
239 count_formal_params (tree fndecl
)
243 gcc_assert (gimple_has_body_p (fndecl
));
245 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
251 /* Return the declaration of Ith formal parameter of the function corresponding
252 to INFO. Note there is no setter function as this array is built just once
253 using ipa_initialize_node_params. */
256 ipa_dump_param (FILE *file
, class ipa_node_params
*info
, int i
)
258 fprintf (file
, "param #%i", i
);
259 if ((*info
->descriptors
)[i
].decl_or_type
)
262 print_generic_expr (file
, (*info
->descriptors
)[i
].decl_or_type
);
266 /* If necessary, allocate vector of parameter descriptors in info of NODE.
267 Return true if they were allocated, false if not. */
270 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
272 class ipa_node_params
*info
= IPA_NODE_REF (node
);
274 if (!info
->descriptors
&& param_count
)
276 vec_safe_grow_cleared (info
->descriptors
, param_count
);
283 /* Initialize the ipa_node_params structure associated with NODE by counting
284 the function parameters, creating the descriptors and populating their
288 ipa_initialize_node_params (struct cgraph_node
*node
)
290 class ipa_node_params
*info
= IPA_NODE_REF (node
);
292 if (!info
->descriptors
293 && ipa_alloc_node_params (node
, count_formal_params (node
->decl
)))
294 ipa_populate_param_decls (node
, *info
->descriptors
);
297 /* Print the jump functions associated with call graph edge CS to file F. */
300 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
304 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
305 for (i
= 0; i
< count
; i
++)
307 struct ipa_jump_func
*jump_func
;
308 enum jump_func_type type
;
310 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
311 type
= jump_func
->type
;
313 fprintf (f
, " param %d: ", i
);
314 if (type
== IPA_JF_UNKNOWN
)
315 fprintf (f
, "UNKNOWN\n");
316 else if (type
== IPA_JF_CONST
)
318 tree val
= jump_func
->value
.constant
.value
;
319 fprintf (f
, "CONST: ");
320 print_generic_expr (f
, val
);
321 if (TREE_CODE (val
) == ADDR_EXPR
322 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
325 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
329 else if (type
== IPA_JF_PASS_THROUGH
)
331 fprintf (f
, "PASS THROUGH: ");
332 fprintf (f
, "%d, op %s",
333 jump_func
->value
.pass_through
.formal_id
,
334 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
335 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
338 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
340 if (jump_func
->value
.pass_through
.agg_preserved
)
341 fprintf (f
, ", agg_preserved");
344 else if (type
== IPA_JF_ANCESTOR
)
346 fprintf (f
, "ANCESTOR: ");
347 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
348 jump_func
->value
.ancestor
.formal_id
,
349 jump_func
->value
.ancestor
.offset
);
350 if (jump_func
->value
.ancestor
.agg_preserved
)
351 fprintf (f
, ", agg_preserved");
355 if (jump_func
->agg
.items
)
357 struct ipa_agg_jf_item
*item
;
360 fprintf (f
, " Aggregate passed by %s:\n",
361 jump_func
->agg
.by_ref
? "reference" : "value");
362 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
364 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
366 if (TYPE_P (item
->value
))
367 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
368 tree_to_uhwi (TYPE_SIZE (item
->value
)));
371 fprintf (f
, "cst: ");
372 print_generic_expr (f
, item
->value
);
378 class ipa_polymorphic_call_context
*ctx
379 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs
), i
);
380 if (ctx
&& !ctx
->useless_p ())
382 fprintf (f
, " Context: ");
383 ctx
->dump (dump_file
);
388 fprintf (f
, " value: ");
389 print_hex (jump_func
->bits
->value
, f
);
390 fprintf (f
, ", mask: ");
391 print_hex (jump_func
->bits
->mask
, f
);
395 fprintf (f
, " Unknown bits\n");
401 (jump_func
->m_vr
->kind () == VR_ANTI_RANGE
) ? "~" : "");
402 print_decs (wi::to_wide (jump_func
->m_vr
->min ()), f
);
404 print_decs (wi::to_wide (jump_func
->m_vr
->max ()), f
);
408 fprintf (f
, " Unknown VR\n");
413 /* Print the jump functions of all arguments on all call graph edges going from
417 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
419 struct cgraph_edge
*cs
;
421 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
422 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
424 if (!ipa_edge_args_info_available_for_edge_p (cs
))
427 fprintf (f
, " callsite %s -> %s : \n",
429 cs
->callee
->dump_name ());
430 ipa_print_node_jump_functions_for_edge (f
, cs
);
433 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
435 class cgraph_indirect_call_info
*ii
;
436 if (!ipa_edge_args_info_available_for_edge_p (cs
))
439 ii
= cs
->indirect_info
;
440 if (ii
->agg_contents
)
441 fprintf (f
, " indirect %s callsite, calling param %i, "
442 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
443 ii
->member_ptr
? "member ptr" : "aggregate",
444 ii
->param_index
, ii
->offset
,
445 ii
->by_ref
? "by reference" : "by_value");
447 fprintf (f
, " indirect %s callsite, calling param %i, "
448 "offset " HOST_WIDE_INT_PRINT_DEC
,
449 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
454 fprintf (f
, ", for stmt ");
455 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
460 ii
->context
.dump (f
);
461 ipa_print_node_jump_functions_for_edge (f
, cs
);
465 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
468 ipa_print_all_jump_functions (FILE *f
)
470 struct cgraph_node
*node
;
472 fprintf (f
, "\nJump functions:\n");
473 FOR_EACH_FUNCTION (node
)
475 ipa_print_node_jump_functions (f
, node
);
479 /* Set jfunc to be a know-really nothing jump function. */
482 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
484 jfunc
->type
= IPA_JF_UNKNOWN
;
489 /* Set JFUNC to be a copy of another jmp (to be used by jump function
490 combination code). The two functions will share their rdesc. */
493 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
494 struct ipa_jump_func
*src
)
497 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
498 dst
->type
= IPA_JF_CONST
;
499 dst
->value
.constant
= src
->value
.constant
;
502 /* Set JFUNC to be a constant jmp function. */
505 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
506 struct cgraph_edge
*cs
)
508 jfunc
->type
= IPA_JF_CONST
;
509 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
511 if (TREE_CODE (constant
) == ADDR_EXPR
512 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
514 struct ipa_cst_ref_desc
*rdesc
;
516 rdesc
= ipa_refdesc_pool
.allocate ();
518 rdesc
->next_duplicate
= NULL
;
520 jfunc
->value
.constant
.rdesc
= rdesc
;
523 jfunc
->value
.constant
.rdesc
= NULL
;
526 /* Set JFUNC to be a simple pass-through jump function. */
528 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
531 jfunc
->type
= IPA_JF_PASS_THROUGH
;
532 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
533 jfunc
->value
.pass_through
.formal_id
= formal_id
;
534 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
535 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
538 /* Set JFUNC to be an unary pass through jump function. */
541 ipa_set_jf_unary_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
542 enum tree_code operation
)
544 jfunc
->type
= IPA_JF_PASS_THROUGH
;
545 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
546 jfunc
->value
.pass_through
.formal_id
= formal_id
;
547 jfunc
->value
.pass_through
.operation
= operation
;
548 jfunc
->value
.pass_through
.agg_preserved
= false;
550 /* Set JFUNC to be an arithmetic pass through jump function. */
553 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
554 tree operand
, enum tree_code operation
)
556 jfunc
->type
= IPA_JF_PASS_THROUGH
;
557 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
558 jfunc
->value
.pass_through
.formal_id
= formal_id
;
559 jfunc
->value
.pass_through
.operation
= operation
;
560 jfunc
->value
.pass_through
.agg_preserved
= false;
563 /* Set JFUNC to be an ancestor jump function. */
566 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
567 int formal_id
, bool agg_preserved
)
569 jfunc
->type
= IPA_JF_ANCESTOR
;
570 jfunc
->value
.ancestor
.formal_id
= formal_id
;
571 jfunc
->value
.ancestor
.offset
= offset
;
572 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
575 /* Get IPA BB information about the given BB. FBI is the context of analysis
576 of this function body. */
578 static struct ipa_bb_info
*
579 ipa_get_bb_info (struct ipa_func_body_info
*fbi
, basic_block bb
)
581 gcc_checking_assert (fbi
);
582 return &fbi
->bb_infos
[bb
->index
];
585 /* Structure to be passed in between detect_type_change and
586 check_stmt_for_type_change. */
588 struct prop_type_change_info
590 /* Offset into the object where there is the virtual method pointer we are
592 HOST_WIDE_INT offset
;
593 /* The declaration or SSA_NAME pointer of the base that we are checking for
596 /* Set to true if dynamic type change has been detected. */
597 bool type_maybe_changed
;
600 /* Return true if STMT can modify a virtual method table pointer.
602 This function makes special assumptions about both constructors and
603 destructors which are all the functions that are allowed to alter the VMT
604 pointers. It assumes that destructors begin with assignment into all VMT
605 pointers and that constructors essentially look in the following way:
607 1) The very first thing they do is that they call constructors of ancestor
608 sub-objects that have them.
610 2) Then VMT pointers of this and all its ancestors is set to new values
611 corresponding to the type corresponding to the constructor.
613 3) Only afterwards, other stuff such as constructor of member sub-objects
614 and the code written by the user is run. Only this may include calling
615 virtual functions, directly or indirectly.
617 There is no way to call a constructor of an ancestor sub-object in any
620 This means that we do not have to care whether constructors get the correct
621 type information because they will always change it (in fact, if we define
622 the type to be given by the VMT pointer, it is undefined).
624 The most important fact to derive from the above is that if, for some
625 statement in the section 3, we try to detect whether the dynamic type has
626 changed, we can safely ignore all calls as we examine the function body
627 backwards until we reach statements in section 2 because these calls cannot
628 be ancestor constructors or destructors (if the input is not bogus) and so
629 do not change the dynamic type (this holds true only for automatically
630 allocated objects but at the moment we devirtualize only these). We then
631 must detect that statements in section 2 change the dynamic type and can try
632 to derive the new type. That is enough and we can stop, we will never see
633 the calls into constructors of sub-objects in this code. Therefore we can
634 safely ignore all call statements that we traverse.
638 stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
640 if (is_gimple_call (stmt
))
642 if (gimple_clobber_p (stmt
))
644 else if (is_gimple_assign (stmt
))
646 tree lhs
= gimple_assign_lhs (stmt
);
648 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
650 if (flag_strict_aliasing
651 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
654 if (TREE_CODE (lhs
) == COMPONENT_REF
655 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
657 /* In the future we might want to use get_ref_base_and_extent to find
658 if there is a field corresponding to the offset and if so, proceed
659 almost like if it was a component ref. */
665 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
666 to check whether a particular statement may modify the virtual table
667 pointer.  It stores its result into DATA, which points to a
668 prop_type_change_info structure. */
671 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
673 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
674 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
676 if (stmt_may_be_vtbl_ptr_store (stmt
))
678 tci
->type_maybe_changed
= true;
685 /* See if ARG is PARAM_DECl describing instance passed by pointer
686 or reference in FUNCTION. Return false if the dynamic type may change
687 in between the beginning of the function and the moment CALL is invoked.
689 Generally functions are not allowed to change type of such instances,
690 but they call destructors. We assume that methods cannot destroy the THIS
691 pointer. Also as a special cases, constructor and destructors may change
692 type of the THIS pointer. */
695 param_type_may_change_p (tree function
, tree arg
, gimple
*call
)
697 /* Pure functions cannot do any changes on the dynamic type;
698 that would require writing to memory. */
699 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
701 /* We need to check if we are within inlined constructor
702 or destructor (ideally we would have way to check that the
703 inline cdtor is actually working on ARG, but we don't have
704 easy tie on this, so punt on all non-pure cdtors.
705 We may also record the types of cdtors and once we know type
706 of the instance match them.
708 Also code unification optimizations may merge calls from
709 different blocks making return values unreliable. So
710 do nothing during late optimization. */
711 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
713 if (TREE_CODE (arg
) == SSA_NAME
714 && SSA_NAME_IS_DEFAULT_DEF (arg
)
715 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
717 /* Normal (non-THIS) argument. */
718 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
719 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
720 /* THIS pointer of an method - here we want to watch constructors
721 and destructors as those definitely may change the dynamic
723 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
724 && !DECL_CXX_CONSTRUCTOR_P (function
)
725 && !DECL_CXX_DESTRUCTOR_P (function
)
726 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
728 /* Walk the inline stack and watch out for ctors/dtors. */
729 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
730 block
= BLOCK_SUPERCONTEXT (block
))
731 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
739 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
740 callsite CALL) by looking for assignments to its virtual table pointer. If
741 it is, return true and fill in the jump function JFUNC with relevant type
742 information or set it to unknown. ARG is the object itself (not a pointer
743 to it, unless dereferenced). BASE is the base of the memory access as
744 returned by get_ref_base_and_extent, as is the offset.
746 This is helper function for detect_type_change and detect_type_change_ssa
747 that does the heavy work which is usually unnecessary. */
750 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
751 tree base
, tree comp_type
, gcall
*call
,
752 struct ipa_jump_func
*jfunc
,
753 HOST_WIDE_INT offset
)
755 struct prop_type_change_info tci
;
758 gcc_checking_assert (DECL_P (arg
)
759 || TREE_CODE (arg
) == MEM_REF
760 || handled_component_p (arg
));
762 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
764 /* Const calls cannot call virtual methods through VMT and so type changes do
766 if (!flag_devirtualize
|| !gimple_vuse (call
)
767 /* Be sure expected_type is polymorphic. */
769 || TREE_CODE (comp_type
) != RECORD_TYPE
770 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
771 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
774 ao_ref_init (&ao
, arg
);
777 ao
.size
= POINTER_SIZE
;
778 ao
.max_size
= ao
.size
;
781 tci
.object
= get_base_address (arg
);
782 tci
.type_maybe_changed
= false;
785 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
786 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
+ 1);
788 if (walked
>= 0 && !tci
.type_maybe_changed
)
791 ipa_set_jf_unknown (jfunc
);
795 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
796 If it is, return true and fill in the jump function JFUNC with relevant type
797 information or set it to unknown. ARG is the object itself (not a pointer
798 to it, unless dereferenced). BASE is the base of the memory access as
799 returned by get_ref_base_and_extent, as is the offset. */
802 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
803 tree comp_type
, gcall
*call
, struct ipa_jump_func
*jfunc
,
804 HOST_WIDE_INT offset
)
806 if (!flag_devirtualize
)
809 if (TREE_CODE (base
) == MEM_REF
810 && !param_type_may_change_p (current_function_decl
,
811 TREE_OPERAND (base
, 0),
814 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
815 call
, jfunc
, offset
);
818 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
819 SSA name (its dereference will become the base and the offset is assumed to
823 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
824 gcall
*call
, struct ipa_jump_func
*jfunc
)
826 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
827 if (!flag_devirtualize
828 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
831 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
834 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
835 build_int_cst (ptr_type_node
, 0));
837 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
841 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
842 boolean variable pointed to by DATA. */
845 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
848 bool *b
= (bool *) data
;
853 /* Find the nearest valid aa status for parameter specified by INDEX that
856 static struct ipa_param_aa_status
*
857 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
862 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
865 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
866 if (!bi
->param_aa_statuses
.is_empty ()
867 && bi
->param_aa_statuses
[index
].valid
)
868 return &bi
->param_aa_statuses
[index
];
872 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
873 structures and/or initialize the result with a dominating description as
876 static struct ipa_param_aa_status
*
877 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
880 gcc_checking_assert (fbi
);
881 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
882 if (bi
->param_aa_statuses
.is_empty ())
883 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
);
884 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
887 gcc_checking_assert (!paa
->parm_modified
888 && !paa
->ref_modified
889 && !paa
->pt_modified
);
890 struct ipa_param_aa_status
*dom_paa
;
891 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
901 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
902 a value known not to be modified in this function before reaching the
903 statement STMT. FBI holds information about the function we have so far
904 gathered but do not survive the summary building stage. */
907 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
908 gimple
*stmt
, tree parm_load
)
910 struct ipa_param_aa_status
*paa
;
911 bool modified
= false;
914 tree base
= get_base_address (parm_load
);
915 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
916 if (TREE_READONLY (base
))
919 gcc_checking_assert (fbi
);
920 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
921 if (paa
->parm_modified
)
924 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
925 ao_ref_init (&refd
, parm_load
);
926 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
927 &modified
, NULL
, NULL
,
928 fbi
->aa_walk_budget
+ 1);
933 fbi
->aa_walk_budget
= 0;
936 fbi
->aa_walk_budget
-= walked
;
938 paa
->parm_modified
= true;
942 /* If STMT is an assignment that loads a value from an parameter declaration,
943 return the index of the parameter in ipa_node_params which has not been
944 modified. Otherwise return -1. */
947 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
948 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
954 if (!gimple_assign_single_p (stmt
))
957 op1
= gimple_assign_rhs1 (stmt
);
958 if (TREE_CODE (op1
) != PARM_DECL
)
961 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
963 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
969 /* Return true if memory reference REF (which must be a load through parameter
970 with INDEX) loads data that are known to be unmodified in this function
971 before reaching statement STMT. */
974 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
975 int index
, gimple
*stmt
, tree ref
)
977 struct ipa_param_aa_status
*paa
;
978 bool modified
= false;
981 gcc_checking_assert (fbi
);
982 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
983 if (paa
->ref_modified
)
986 gcc_checking_assert (gimple_vuse (stmt
));
987 ao_ref_init (&refd
, ref
);
988 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
989 &modified
, NULL
, NULL
,
990 fbi
->aa_walk_budget
+ 1);
994 fbi
->aa_walk_budget
= 0;
997 fbi
->aa_walk_budget
-= walked
;
999 paa
->ref_modified
= true;
1003 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1004 is known to be unmodified in this function before reaching call statement
1005 CALL into which it is passed. FBI describes the function body. */
1008 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1009 gimple
*call
, tree parm
)
1011 bool modified
= false;
1014 /* It's unnecessary to calculate anything about memory contents for a const
1015 function because it is not going to use it. But do not cache the result
1016 either. Also, no such calculations for non-pointers. */
1017 if (!gimple_vuse (call
)
1018 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1021 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1024 if (paa
->pt_modified
)
1027 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1028 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1029 &modified
, NULL
, NULL
,
1030 fbi
->aa_walk_budget
+ 1);
1033 fbi
->aa_walk_budget
= 0;
1037 fbi
->aa_walk_budget
-= walked
;
1039 paa
->pt_modified
= true;
1043 /* Return true if we can prove that OP is a memory reference loading
1044 data from an aggregate passed as a parameter.
1046 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1047 false if it cannot prove that the value has not been modified before the
1048 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1049 if it cannot prove the value has not been modified, in that case it will
1050 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1052 INFO and PARMS_AINFO describe parameters of the current function (but the
1053 latter can be NULL), STMT is the load statement. If function returns true,
1054 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1055 within the aggregate and whether it is a load from a value passed by
1056 reference respectively. */
1059 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1060 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1061 gimple
*stmt
, tree op
, int *index_p
,
1062 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1063 bool *by_ref_p
, bool *guaranteed_unmodified
)
1068 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1075 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1077 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1083 if (guaranteed_unmodified
)
1084 *guaranteed_unmodified
= true;
1090 if (TREE_CODE (base
) != MEM_REF
1091 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1092 || !integer_zerop (TREE_OPERAND (base
, 1)))
1095 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1097 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1098 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1102 /* This branch catches situations where a pointer parameter is not a
1103 gimple register, for example:
1105 void hip7(S*) (struct S * p)
1107 void (*<T2e4>) (struct S *) D.1867;
1112 D.1867_2 = p.1_1->f;
1117 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1118 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1123 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1124 if (!data_preserved
&& !guaranteed_unmodified
)
1131 if (guaranteed_unmodified
)
1132 *guaranteed_unmodified
= data_preserved
;
1138 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1139 of an assignment statement STMT, try to determine whether we are actually
1140 handling any of the following cases and construct an appropriate jump
1141 function into JFUNC if so:
1143 1) The passed value is loaded from a formal parameter which is not a gimple
1144 register (most probably because it is addressable, the value has to be
1145 scalar) and we can guarantee the value has not changed. This case can
1146 therefore be described by a simple pass-through jump function. For example:
1155 2) The passed value can be described by a simple arithmetic pass-through
1162 D.2064_4 = a.1(D) + 4;
1165 This case can also occur in combination of the previous one, e.g.:
1173 D.2064_4 = a.0_3 + 4;
1176 3) The passed value is an address of an object within another one (which
1177 also passed by reference). Such situations are described by an ancestor
1178 jump function and describe situations such as:
1180 B::foo() (struct B * const this)
1184 D.1845_2 = &this_1(D)->D.1748;
1187 INFO is the structure describing individual parameters access different
1188 stages of IPA optimizations. PARMS_AINFO contains the information that is
1189 only needed for intraprocedural analysis. */
1192 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1193 class ipa_node_params
*info
,
1194 struct ipa_jump_func
*jfunc
,
1195 gcall
*call
, gimple
*stmt
, tree name
,
1198 HOST_WIDE_INT offset
, size
;
1199 tree op1
, tc_ssa
, base
, ssa
;
1203 op1
= gimple_assign_rhs1 (stmt
);
1205 if (TREE_CODE (op1
) == SSA_NAME
)
1207 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1208 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1210 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1211 SSA_NAME_DEF_STMT (op1
));
1216 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1217 tc_ssa
= gimple_assign_lhs (stmt
);
1222 switch (gimple_assign_rhs_class (stmt
))
1224 case GIMPLE_BINARY_RHS
:
1226 tree op2
= gimple_assign_rhs2 (stmt
);
1227 if (!is_gimple_ip_invariant (op2
)
1228 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1230 && !useless_type_conversion_p (TREE_TYPE (name
),
1234 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1235 gimple_assign_rhs_code (stmt
));
1238 case GIMPLE_SINGLE_RHS
:
1240 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1242 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1245 case GIMPLE_UNARY_RHS
:
1246 if (is_gimple_assign (stmt
)
1247 && gimple_assign_rhs_class (stmt
) == GIMPLE_UNARY_RHS
1248 && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1249 ipa_set_jf_unary_pass_through (jfunc
, index
,
1250 gimple_assign_rhs_code (stmt
));
1256 if (TREE_CODE (op1
) != ADDR_EXPR
)
1258 op1
= TREE_OPERAND (op1
, 0);
1259 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1261 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1262 offset_int mem_offset
;
1264 || TREE_CODE (base
) != MEM_REF
1265 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1267 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1268 ssa
= TREE_OPERAND (base
, 0);
1269 if (TREE_CODE (ssa
) != SSA_NAME
1270 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1274 /* Dynamic types are changed in constructors and destructors. */
1275 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1276 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1277 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1278 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
));
1281 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1284 iftmp.1_3 = &obj_2(D)->D.1762;
1286 The base of the MEM_REF must be a default definition SSA NAME of a
1287 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1288 whole MEM_REF expression is returned and the offset calculated from any
1289 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1290 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1293 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1296 tree expr
, parm
, obj
;
1299 if (!gimple_assign_single_p (assign
))
1301 expr
= gimple_assign_rhs1 (assign
);
1303 if (TREE_CODE (expr
) != ADDR_EXPR
)
1305 expr
= TREE_OPERAND (expr
, 0);
1307 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1309 offset_int mem_offset
;
1311 || TREE_CODE (expr
) != MEM_REF
1312 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1314 parm
= TREE_OPERAND (expr
, 0);
1315 if (TREE_CODE (parm
) != SSA_NAME
1316 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1317 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1320 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1326 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1327 statement PHI, try to find out whether NAME is in fact a
1328 multiple-inheritance typecast from a descendant into an ancestor of a formal
1329 parameter and thus can be described by an ancestor jump function and if so,
1330 write the appropriate function into JFUNC.
1332 Essentially we want to match the following pattern:
1340 iftmp.1_3 = &obj_2(D)->D.1762;
1343 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1344 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1348 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1349 class ipa_node_params
*info
,
1350 struct ipa_jump_func
*jfunc
,
1351 gcall
*call
, gphi
*phi
)
1353 HOST_WIDE_INT offset
;
1354 gimple
*assign
, *cond
;
1355 basic_block phi_bb
, assign_bb
, cond_bb
;
1356 tree tmp
, parm
, expr
, obj
;
1359 if (gimple_phi_num_args (phi
) != 2)
1362 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1363 tmp
= PHI_ARG_DEF (phi
, 0);
1364 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1365 tmp
= PHI_ARG_DEF (phi
, 1);
1368 if (TREE_CODE (tmp
) != SSA_NAME
1369 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1370 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1371 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1374 assign
= SSA_NAME_DEF_STMT (tmp
);
1375 assign_bb
= gimple_bb (assign
);
1376 if (!single_pred_p (assign_bb
))
1378 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1381 parm
= TREE_OPERAND (expr
, 0);
1382 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1386 cond_bb
= single_pred (assign_bb
);
1387 cond
= last_stmt (cond_bb
);
1389 || gimple_code (cond
) != GIMPLE_COND
1390 || gimple_cond_code (cond
) != NE_EXPR
1391 || gimple_cond_lhs (cond
) != parm
1392 || !integer_zerop (gimple_cond_rhs (cond
)))
1395 phi_bb
= gimple_bb (phi
);
1396 for (i
= 0; i
< 2; i
++)
1398 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1399 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1403 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1404 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
));
1407 /* Inspect the given TYPE and return true iff it has the same structure (the
1408 same number of fields of the same types) as a C++ member pointer. If
1409 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1410 corresponding fields there. */
1413 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1417 if (TREE_CODE (type
) != RECORD_TYPE
)
1420 fld
= TYPE_FIELDS (type
);
1421 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1422 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1423 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1429 fld
= DECL_CHAIN (fld
);
1430 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1431 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1436 if (DECL_CHAIN (fld
))
1442 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1443 return the rhs of its defining statement. Otherwise return RHS as it
1447 get_ssa_def_if_simple_copy (tree rhs
)
1449 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1451 gimple
*def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1453 if (gimple_assign_single_p (def_stmt
))
1454 rhs
= gimple_assign_rhs1 (def_stmt
);
1461 /* Simple linked list, describing known contents of an aggregate before
1464 struct ipa_known_agg_contents_list
1466 /* Offset and size of the described part of the aggregate. */
1467 HOST_WIDE_INT offset
, size
;
1468 /* Known constant value or NULL if the contents is known to be unknown. */
1470 /* Pointer to the next structure in the list. */
1471 struct ipa_known_agg_contents_list
*next
;
1474 /* Add a known content item into a linked list of ipa_known_agg_contents_list
1475 structure, in which all elements are sorted ascendingly by offset. */
1478 add_to_agg_contents_list (struct ipa_known_agg_contents_list
**plist
,
1479 struct ipa_known_agg_contents_list
*item
)
1481 struct ipa_known_agg_contents_list
*list
= *plist
;
1483 for (; list
; list
= list
->next
)
1485 if (list
->offset
>= item
->offset
)
1488 plist
= &list
->next
;
1495 /* Check whether a given known content is clobbered by certain element in
1496 a linked list of ipa_known_agg_contents_list. */
1499 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list
*list
,
1500 struct ipa_known_agg_contents_list
*item
)
1502 for (; list
; list
= list
->next
)
1504 if (list
->offset
>= item
->offset
)
1505 return list
->offset
< item
->offset
+ item
->size
;
1507 if (list
->offset
+ list
->size
> item
->offset
)
1514 /* Build aggregate jump function from LIST, assuming there are exactly
1515 CONST_COUNT constant entries there and that offset of the passed argument
1516 is ARG_OFFSET and store it into JFUNC. */
1519 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1520 int const_count
, HOST_WIDE_INT arg_offset
,
1521 struct ipa_jump_func
*jfunc
)
1523 vec_alloc (jfunc
->agg
.items
, const_count
);
1528 struct ipa_agg_jf_item item
;
1529 item
.offset
= list
->offset
- arg_offset
;
1530 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1531 item
.value
= unshare_expr_without_location (list
->constant
);
1532 jfunc
->agg
.items
->quick_push (item
);
1538 /* If STMT is a memory store to the object whose address is BASE, extract
1539 information (offset, size, and value) into CONTENT, and return true,
1540 otherwise we conservatively assume the whole object is modified with
1541 unknown content, and return false. CHECK_REF means that access to object
1542 is expected to be in form of MEM_REF expression. */
1545 extract_mem_content (gimple
*stmt
, tree base
, bool check_ref
,
1546 struct ipa_known_agg_contents_list
*content
)
1548 HOST_WIDE_INT lhs_offset
, lhs_size
;
1549 tree lhs
, rhs
, lhs_base
;
1552 if (!gimple_assign_single_p (stmt
))
1555 lhs
= gimple_assign_lhs (stmt
);
1556 rhs
= gimple_assign_rhs1 (stmt
);
1558 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1559 || TREE_CODE (lhs
) == BIT_FIELD_REF
1560 || contains_bitfld_component_ref_p (lhs
))
1563 lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
,
1564 &lhs_size
, &reverse
);
1570 if (TREE_CODE (lhs_base
) != MEM_REF
1571 || TREE_OPERAND (lhs_base
, 0) != base
1572 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1575 else if (lhs_base
!= base
)
1578 rhs
= get_ssa_def_if_simple_copy (rhs
);
1580 content
->size
= lhs_size
;
1581 content
->offset
= lhs_offset
;
1582 content
->constant
= is_gimple_ip_invariant (rhs
) ? rhs
: NULL_TREE
;
1583 content
->next
= NULL
;
1588 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1589 in ARG is filled in with constant values. ARG can either be an aggregate
1590 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1591 aggregate. JFUNC is the jump function into which the constants are
1592 subsequently stored. AA_WALK_BUDGET_P points to limit on number of
1593 statements we allow get_continuation_for_phi to examine. */
1596 determine_known_aggregate_parts (gcall
*call
, tree arg
,
1598 struct ipa_jump_func
*jfunc
,
1599 unsigned *aa_walk_budget_p
)
1601 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
1602 bitmap visited
= NULL
;
1603 int item_count
= 0, const_count
= 0;
1604 int ipa_max_agg_items
= PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
);
1605 HOST_WIDE_INT arg_offset
, arg_size
;
1607 bool check_ref
, by_ref
;
1610 if (ipa_max_agg_items
== 0)
1613 /* The function operates in three stages. First, we prepare check_ref, r,
1614 arg_base and arg_offset based on what is actually passed as an actual
1617 if (POINTER_TYPE_P (arg_type
))
1620 if (TREE_CODE (arg
) == SSA_NAME
)
1623 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
1624 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
1629 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1630 arg_size
= tree_to_uhwi (type_size
);
1631 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1633 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1637 arg
= TREE_OPERAND (arg
, 0);
1638 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
1639 &arg_size
, &reverse
);
1642 if (DECL_P (arg_base
))
1645 ao_ref_init (&r
, arg_base
);
1657 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1661 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
1662 &arg_size
, &reverse
);
1666 ao_ref_init (&r
, arg
);
1669 /* Second stage traverses virtual SSA web backwards starting from the call
1670 statement, only looks at individual dominating virtual operand (its
1671 definition dominates the call), as long as it is confident that content
1672 of the aggregate is affected by definition of the virtual operand, it
1673 builds a sorted linked list of ipa_agg_jf_list describing that. */
1675 for (tree dom_vuse
= gimple_vuse (call
); dom_vuse
;)
1677 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
1679 if (gimple_code (stmt
) == GIMPLE_PHI
)
1681 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
1683 &visited
, false, NULL
, NULL
);
1687 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
1689 struct ipa_known_agg_contents_list
*content
1690 = XALLOCA (struct ipa_known_agg_contents_list
);
1692 if (!extract_mem_content (stmt
, arg_base
, check_ref
, content
))
1695 /* Now we get a dominating virtual operand, and need to check
1696 whether its value is clobbered any other dominating one. */
1697 if (content
->constant
1698 && !clobber_by_agg_contents_list_p (all_list
, content
))
1700 struct ipa_known_agg_contents_list
*copy
1701 = XALLOCA (struct ipa_known_agg_contents_list
);
1703 /* Add to the list consisting of only dominating virtual
1704 operands, whose definitions can finally reach the call. */
1705 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
1707 if (++const_count
== ipa_max_agg_items
)
1711 /* Add to the list consisting of all dominating virtual operands. */
1712 add_to_agg_contents_list (&all_list
, content
);
1714 if (++item_count
== 2 * ipa_max_agg_items
)
1717 dom_vuse
= gimple_vuse (stmt
);
1721 BITMAP_FREE (visited
);
1723 /* Third stage just goes over the list and creates an appropriate vector of
1724 ipa_agg_jf_item structures out of it, of course only if there are
1725 any known constants to begin with. */
1729 jfunc
->agg
.by_ref
= by_ref
;
1730 build_agg_jump_func_from_list (list
, const_count
, arg_offset
, jfunc
);
1735 /* Return the Ith param type of callee associated with call graph
1739 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1742 tree type
= (e
->callee
1743 ? TREE_TYPE (e
->callee
->decl
)
1744 : gimple_call_fntype (e
->call_stmt
));
1745 tree t
= TYPE_ARG_TYPES (type
);
1747 for (n
= 0; n
< i
; n
++)
1754 return TREE_VALUE (t
);
1757 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1758 for (n
= 0; n
< i
; n
++)
1765 return TREE_TYPE (t
);
1769 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1770 allocated structure or a previously existing one shared with other jump
1771 functions and/or transformation summaries. */
1774 ipa_get_ipa_bits_for_value (const widest_int
&value
, const widest_int
&mask
)
1780 ipa_bits
**slot
= ipa_bits_hash_table
->find_slot (&tmp
, INSERT
);
1784 ipa_bits
*res
= ggc_alloc
<ipa_bits
> ();
1792 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
1793 table in order to avoid creating multiple same ipa_bits structures. */
1796 ipa_set_jfunc_bits (ipa_jump_func
*jf
, const widest_int
&value
,
1797 const widest_int
&mask
)
1799 jf
->bits
= ipa_get_ipa_bits_for_value (value
, mask
);
1802 /* Return a pointer to a value_range just like *TMP, but either find it in
1803 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1805 static value_range_base
*
1806 ipa_get_value_range (value_range_base
*tmp
)
1808 value_range_base
**slot
= ipa_vr_hash_table
->find_slot (tmp
, INSERT
);
1812 value_range_base
*vr
= ggc_alloc
<value_range_base
> ();
1819 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1820 equiv set. Use hash table in order to avoid creating multiple same copies of
1823 static value_range_base
*
1824 ipa_get_value_range (enum value_range_kind type
, tree min
, tree max
)
1826 value_range_base
tmp (type
, min
, max
);
1827 return ipa_get_value_range (&tmp
);
1830 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1831 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
1832 same value_range structures. */
1835 ipa_set_jfunc_vr (ipa_jump_func
*jf
, enum value_range_kind type
,
1838 jf
->m_vr
= ipa_get_value_range (type
, min
, max
);
1841 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
1842 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
1845 ipa_set_jfunc_vr (ipa_jump_func
*jf
, value_range_base
*tmp
)
1847 jf
->m_vr
= ipa_get_value_range (tmp
);
1850 /* Compute jump function for all arguments of callsite CS and insert the
1851 information in the jump_functions array in the ipa_edge_args corresponding
1852 to this callsite. */
1855 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info
*fbi
,
1856 struct cgraph_edge
*cs
)
1858 class ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1859 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1860 gcall
*call
= cs
->call_stmt
;
1861 int n
, arg_num
= gimple_call_num_args (call
);
1862 bool useful_context
= false;
1864 if (arg_num
== 0 || args
->jump_functions
)
1866 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1867 if (flag_devirtualize
)
1868 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
);
1870 if (gimple_call_internal_p (call
))
1872 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1875 for (n
= 0; n
< arg_num
; n
++)
1877 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1878 tree arg
= gimple_call_arg (call
, n
);
1879 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1880 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
1883 class ipa_polymorphic_call_context
context (cs
->caller
->decl
,
1886 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
,
1887 &fbi
->aa_walk_budget
);
1888 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
1889 if (!context
.useless_p ())
1890 useful_context
= true;
1893 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1895 bool addr_nonzero
= false;
1896 bool strict_overflow
= false;
1898 if (TREE_CODE (arg
) == SSA_NAME
1900 && get_ptr_nonnull (arg
))
1901 addr_nonzero
= true;
1902 else if (tree_single_nonzero_warnv_p (arg
, &strict_overflow
))
1903 addr_nonzero
= true;
1907 tree z
= build_int_cst (TREE_TYPE (arg
), 0);
1908 ipa_set_jfunc_vr (jfunc
, VR_ANTI_RANGE
, z
, z
);
1911 gcc_assert (!jfunc
->m_vr
);
1916 value_range_kind type
;
1917 if (TREE_CODE (arg
) == SSA_NAME
1919 && (type
= get_range_info (arg
, &min
, &max
))
1920 && (type
== VR_RANGE
|| type
== VR_ANTI_RANGE
))
1922 value_range_base resvr
;
1923 value_range_base
tmpvr (type
,
1924 wide_int_to_tree (TREE_TYPE (arg
), min
),
1925 wide_int_to_tree (TREE_TYPE (arg
), max
));
1926 extract_range_from_unary_expr (&resvr
, NOP_EXPR
, param_type
,
1927 &tmpvr
, TREE_TYPE (arg
));
1928 if (!resvr
.undefined_p () && !resvr
.varying_p ())
1929 ipa_set_jfunc_vr (jfunc
, &resvr
);
1931 gcc_assert (!jfunc
->m_vr
);
1934 gcc_assert (!jfunc
->m_vr
);
1937 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
1938 && (TREE_CODE (arg
) == SSA_NAME
|| TREE_CODE (arg
) == INTEGER_CST
))
1940 if (TREE_CODE (arg
) == SSA_NAME
)
1941 ipa_set_jfunc_bits (jfunc
, 0,
1942 widest_int::from (get_nonzero_bits (arg
),
1943 TYPE_SIGN (TREE_TYPE (arg
))));
1945 ipa_set_jfunc_bits (jfunc
, wi::to_widest (arg
), 0);
1947 else if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1949 unsigned HOST_WIDE_INT bitpos
;
1952 get_pointer_alignment_1 (arg
, &align
, &bitpos
);
1953 widest_int mask
= wi::bit_and_not
1954 (wi::mask
<widest_int
> (TYPE_PRECISION (TREE_TYPE (arg
)), false),
1955 align
/ BITS_PER_UNIT
- 1);
1956 widest_int value
= bitpos
/ BITS_PER_UNIT
;
1957 ipa_set_jfunc_bits (jfunc
, value
, mask
);
1960 gcc_assert (!jfunc
->bits
);
1962 if (is_gimple_ip_invariant (arg
)
1964 && is_global_var (arg
)
1965 && TREE_READONLY (arg
)))
1966 ipa_set_jf_constant (jfunc
, arg
, cs
);
1967 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1968 && TREE_CODE (arg
) == PARM_DECL
)
1970 int index
= ipa_get_param_decl_index (info
, arg
);
1972 gcc_assert (index
>=0);
1973 /* Aggregate passed by value, check for pass-through, otherwise we
1974 will attempt to fill in aggregate contents later in this
1976 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
1978 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
1982 else if (TREE_CODE (arg
) == SSA_NAME
)
1984 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1986 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1990 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
1991 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1996 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
1997 if (is_gimple_assign (stmt
))
1998 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
1999 call
, stmt
, arg
, param_type
);
2000 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2001 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
2003 as_a
<gphi
*> (stmt
));
2007 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2008 passed (because type conversions are ignored in gimple). Usually we can
2009 safely get type from function declaration, but in case of K&R prototypes or
2010 variadic functions we can try our luck with type of the pointer passed.
2011 TODO: Since we look for actual initialization of the memory object, we may better
2012 work out the type based on the memory stores we find. */
2014 param_type
= TREE_TYPE (arg
);
2016 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
2017 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2018 && (jfunc
->type
!= IPA_JF_ANCESTOR
2019 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2020 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
2021 || POINTER_TYPE_P (param_type
)))
2022 determine_known_aggregate_parts (call
, arg
, param_type
, jfunc
,
2023 &fbi
->aa_walk_budget
);
2025 if (!useful_context
)
2026 vec_free (args
->polymorphic_call_contexts
);
2029 /* Compute jump functions for all edges - both direct and indirect - outgoing
2033 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2035 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
2037 struct cgraph_edge
*cs
;
2039 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
2041 struct cgraph_node
*callee
= cs
->callee
;
2045 callee
->ultimate_alias_target ();
2046 /* We do not need to bother analyzing calls to unknown functions
2047 unless they may become known during lto/whopr. */
2048 if (!callee
->definition
&& !flag_lto
)
2051 ipa_compute_jump_functions_for_edge (fbi
, cs
);
2055 /* If STMT looks like a statement loading a value from a member pointer formal
2056 parameter, return that parameter and store the offset of the field to
2057 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2058 might be clobbered). If USE_DELTA, then we look for a use of the delta
2059 field rather than the pfn. */
2062 ipa_get_stmt_member_ptr_load_param (gimple
*stmt
, bool use_delta
,
2063 HOST_WIDE_INT
*offset_p
)
2065 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
2067 if (!gimple_assign_single_p (stmt
))
2070 rhs
= gimple_assign_rhs1 (stmt
);
2071 if (TREE_CODE (rhs
) == COMPONENT_REF
)
2073 ref_field
= TREE_OPERAND (rhs
, 1);
2074 rhs
= TREE_OPERAND (rhs
, 0);
2077 ref_field
= NULL_TREE
;
2078 if (TREE_CODE (rhs
) != MEM_REF
)
2080 rec
= TREE_OPERAND (rhs
, 0);
2081 if (TREE_CODE (rec
) != ADDR_EXPR
)
2083 rec
= TREE_OPERAND (rec
, 0);
2084 if (TREE_CODE (rec
) != PARM_DECL
2085 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
2087 ref_offset
= TREE_OPERAND (rhs
, 1);
2094 *offset_p
= int_bit_position (fld
);
2098 if (integer_nonzerop (ref_offset
))
2100 return ref_field
== fld
? rec
: NULL_TREE
;
2103 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
2107 /* Returns true iff T is an SSA_NAME defined by a statement. */
2110 ipa_is_ssa_with_stmt_def (tree t
)
2112 if (TREE_CODE (t
) == SSA_NAME
2113 && !SSA_NAME_IS_DEFAULT_DEF (t
))
2119 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2120 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2121 indirect call graph edge. */
2123 static struct cgraph_edge
*
2124 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
2127 struct cgraph_edge
*cs
;
2129 cs
= node
->get_edge (stmt
);
2130 cs
->indirect_info
->param_index
= param_index
;
2131 cs
->indirect_info
->agg_contents
= 0;
2132 cs
->indirect_info
->member_ptr
= 0;
2133 cs
->indirect_info
->guaranteed_unmodified
= 0;
2137 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2138 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2139 intermediate information about each formal parameter. Currently it checks
2140 whether the call calls a pointer that is a formal parameter and if so, the
2141 parameter is marked with the called flag and an indirect call graph edge
2142 describing the call is created. This is very simple for ordinary pointers
2143 represented in SSA but not-so-nice when it comes to member pointers. The
2144 ugly part of this function does nothing more than trying to match the
2145 pattern of such a call. An example of such a pattern is the gimple dump
2146 below, the call is on the last line:
2149 f$__delta_5 = f.__delta;
2150 f$__pfn_24 = f.__pfn;
2154 f$__delta_5 = MEM[(struct *)&f];
2155 f$__pfn_24 = MEM[(struct *)&f + 4B];
2157 and a few lines below:
2160 D.2496_3 = (int) f$__pfn_24;
2161 D.2497_4 = D.2496_3 & 1;
2168 D.2500_7 = (unsigned int) f$__delta_5;
2169 D.2501_8 = &S + D.2500_7;
2170 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2171 D.2503_10 = *D.2502_9;
2172 D.2504_12 = f$__pfn_24 + -1;
2173 D.2505_13 = (unsigned int) D.2504_12;
2174 D.2506_14 = D.2503_10 + D.2505_13;
2175 D.2507_15 = *D.2506_14;
2176 iftmp.11_16 = (String:: *) D.2507_15;
2179 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2180 D.2500_19 = (unsigned int) f$__delta_5;
2181 D.2508_20 = &S + D.2500_19;
2182 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2184 Such patterns are results of simple calls to a member pointer:
2186 int doprinting (int (MyString::* f)(int) const)
2188 MyString S ("somestring");
2193 Moreover, the function also looks for called pointers loaded from aggregates
2194 passed by value or reference. */
2197 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2200 class ipa_node_params
*info
= fbi
->info
;
2201 HOST_WIDE_INT offset
;
2204 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2206 tree var
= SSA_NAME_VAR (target
);
2207 int index
= ipa_get_param_decl_index (info
, var
);
2209 ipa_note_param_call (fbi
->node
, index
, call
);
2214 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2215 bool guaranteed_unmodified
;
2216 if (gimple_assign_single_p (def
)
2217 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2218 gimple_assign_rhs1 (def
), &index
, &offset
,
2219 NULL
, &by_ref
, &guaranteed_unmodified
))
2221 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2222 cs
->indirect_info
->offset
= offset
;
2223 cs
->indirect_info
->agg_contents
= 1;
2224 cs
->indirect_info
->by_ref
= by_ref
;
2225 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2229 /* Now we need to try to match the complex pattern of calling a member
2231 if (gimple_code (def
) != GIMPLE_PHI
2232 || gimple_phi_num_args (def
) != 2
2233 || !POINTER_TYPE_P (TREE_TYPE (target
))
2234 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2237 /* First, we need to check whether one of these is a load from a member
2238 pointer that is a parameter to this function. */
2239 tree n1
= PHI_ARG_DEF (def
, 0);
2240 tree n2
= PHI_ARG_DEF (def
, 1);
2241 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2243 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2244 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2247 basic_block bb
, virt_bb
;
2248 basic_block join
= gimple_bb (def
);
2249 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2251 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2254 bb
= EDGE_PRED (join
, 0)->src
;
2255 virt_bb
= gimple_bb (d2
);
2257 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2259 bb
= EDGE_PRED (join
, 1)->src
;
2260 virt_bb
= gimple_bb (d1
);
2265 /* Second, we need to check that the basic blocks are laid out in the way
2266 corresponding to the pattern. */
2268 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2269 || single_pred (virt_bb
) != bb
2270 || single_succ (virt_bb
) != join
)
2273 /* Third, let's see that the branching is done depending on the least
2274 significant bit of the pfn. */
2276 gimple
*branch
= last_stmt (bb
);
2277 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2280 if ((gimple_cond_code (branch
) != NE_EXPR
2281 && gimple_cond_code (branch
) != EQ_EXPR
)
2282 || !integer_zerop (gimple_cond_rhs (branch
)))
2285 tree cond
= gimple_cond_lhs (branch
);
2286 if (!ipa_is_ssa_with_stmt_def (cond
))
2289 def
= SSA_NAME_DEF_STMT (cond
);
2290 if (!is_gimple_assign (def
)
2291 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2292 || !integer_onep (gimple_assign_rhs2 (def
)))
2295 cond
= gimple_assign_rhs1 (def
);
2296 if (!ipa_is_ssa_with_stmt_def (cond
))
2299 def
= SSA_NAME_DEF_STMT (cond
);
2301 if (is_gimple_assign (def
)
2302 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2304 cond
= gimple_assign_rhs1 (def
);
2305 if (!ipa_is_ssa_with_stmt_def (cond
))
2307 def
= SSA_NAME_DEF_STMT (cond
);
2311 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2312 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2313 == ptrmemfunc_vbit_in_delta
),
2318 index
= ipa_get_param_decl_index (info
, rec
);
2320 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2322 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2323 cs
->indirect_info
->offset
= offset
;
2324 cs
->indirect_info
->agg_contents
= 1;
2325 cs
->indirect_info
->member_ptr
= 1;
2326 cs
->indirect_info
->guaranteed_unmodified
= 1;
2332 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2333 object referenced in the expression is a formal parameter of the caller
2334 FBI->node (described by FBI->info), create a call note for the
2338 ipa_analyze_virtual_call_uses (struct ipa_func_body_info
*fbi
,
2339 gcall
*call
, tree target
)
2341 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2343 HOST_WIDE_INT anc_offset
;
2345 if (!flag_devirtualize
)
2348 if (TREE_CODE (obj
) != SSA_NAME
)
2351 class ipa_node_params
*info
= fbi
->info
;
2352 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2354 struct ipa_jump_func jfunc
;
2355 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2359 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2360 gcc_assert (index
>= 0);
2361 if (detect_type_change_ssa (fbi
, obj
, obj_type_ref_class (target
),
2367 struct ipa_jump_func jfunc
;
2368 gimple
*stmt
= SSA_NAME_DEF_STMT (obj
);
2371 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2374 index
= ipa_get_param_decl_index (info
,
2375 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2376 gcc_assert (index
>= 0);
2377 if (detect_type_change (fbi
, obj
, expr
, obj_type_ref_class (target
),
2378 call
, &jfunc
, anc_offset
))
2382 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2383 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2384 ii
->offset
= anc_offset
;
2385 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2386 ii
->otr_type
= obj_type_ref_class (target
);
2387 ii
->polymorphic
= 1;
2390 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2391 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2392 containing intermediate information about each formal parameter. */
2395 ipa_analyze_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
)
2397 tree target
= gimple_call_fn (call
);
2400 || (TREE_CODE (target
) != SSA_NAME
2401 && !virtual_method_call_p (target
)))
2404 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2405 /* If we previously turned the call into a direct call, there is
2406 no need to analyze. */
2407 if (cs
&& !cs
->indirect_unknown_callee
)
2410 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2413 tree target
= gimple_call_fn (call
);
2414 ipa_polymorphic_call_context
context (current_function_decl
,
2415 target
, call
, &instance
);
2417 gcc_checking_assert (cs
->indirect_info
->otr_type
2418 == obj_type_ref_class (target
));
2419 gcc_checking_assert (cs
->indirect_info
->otr_token
2420 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2422 cs
->indirect_info
->vptr_changed
2423 = !context
.get_dynamic_type (instance
,
2424 OBJ_TYPE_REF_OBJECT (target
),
2425 obj_type_ref_class (target
), call
,
2426 &fbi
->aa_walk_budget
);
2427 cs
->indirect_info
->context
= context
;
2430 if (TREE_CODE (target
) == SSA_NAME
)
2431 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2432 else if (virtual_method_call_p (target
))
2433 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2437 /* Analyze the call statement STMT with respect to formal parameters (described
2438 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2439 formal parameters are called. */
2442 ipa_analyze_stmt_uses (struct ipa_func_body_info
*fbi
, gimple
*stmt
)
2444 if (is_gimple_call (stmt
))
2445 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2448 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2449 If OP is a parameter declaration, mark it as used in the info structure
2453 visit_ref_for_mod_analysis (gimple
*, tree op
, tree
, void *data
)
2455 class ipa_node_params
*info
= (class ipa_node_params
*) data
;
2457 op
= get_base_address (op
);
2459 && TREE_CODE (op
) == PARM_DECL
)
2461 int index
= ipa_get_param_decl_index (info
, op
);
2462 gcc_assert (index
>= 0);
2463 ipa_set_param_used (info
, index
, true);
2469 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2470 the findings in various structures of the associated ipa_node_params
2471 structure, such as parameter flags, notes etc. FBI holds various data about
2472 the function being analyzed. */
2475 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2477 gimple_stmt_iterator gsi
;
2478 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2480 gimple
*stmt
= gsi_stmt (gsi
);
2482 if (is_gimple_debug (stmt
))
2485 ipa_analyze_stmt_uses (fbi
, stmt
);
2486 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2487 visit_ref_for_mod_analysis
,
2488 visit_ref_for_mod_analysis
,
2489 visit_ref_for_mod_analysis
);
2491 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2492 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2493 visit_ref_for_mod_analysis
,
2494 visit_ref_for_mod_analysis
,
2495 visit_ref_for_mod_analysis
);
2498 /* Calculate controlled uses of parameters of NODE. */
2501 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2503 class ipa_node_params
*info
= IPA_NODE_REF (node
);
2505 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2507 tree parm
= ipa_get_param (info
, i
);
2508 int controlled_uses
= 0;
2510 /* For SSA regs see if parameter is used. For non-SSA we compute
2511 the flag during modification analysis. */
2512 if (is_gimple_reg (parm
))
2514 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2516 if (ddef
&& !has_zero_uses (ddef
))
2518 imm_use_iterator imm_iter
;
2519 use_operand_p use_p
;
2521 ipa_set_param_used (info
, i
, true);
2522 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2523 if (!is_gimple_call (USE_STMT (use_p
)))
2525 if (!is_gimple_debug (USE_STMT (use_p
)))
2527 controlled_uses
= IPA_UNDESCRIBED_USE
;
2535 controlled_uses
= 0;
2538 controlled_uses
= IPA_UNDESCRIBED_USE
;
2539 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2543 /* Free stuff in BI. */
2546 free_ipa_bb_info (struct ipa_bb_info
*bi
)
2548 bi
->cg_edges
.release ();
2549 bi
->param_aa_statuses
.release ();
2552 /* Dominator walker driving the analysis. */
2554 class analysis_dom_walker
: public dom_walker
2557 analysis_dom_walker (struct ipa_func_body_info
*fbi
)
2558 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
2560 virtual edge
before_dom_children (basic_block
);
2563 struct ipa_func_body_info
*m_fbi
;
2567 analysis_dom_walker::before_dom_children (basic_block bb
)
2569 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
2570 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
2574 /* Release body info FBI. */
2577 ipa_release_body_info (struct ipa_func_body_info
*fbi
)
2580 struct ipa_bb_info
*bi
;
2582 FOR_EACH_VEC_ELT (fbi
->bb_infos
, i
, bi
)
2583 free_ipa_bb_info (bi
);
2584 fbi
->bb_infos
.release ();
2587 /* Initialize the array describing properties of formal parameters
2588 of NODE, analyze their uses and compute jump functions associated
2589 with actual arguments of calls from within NODE. */
2592 ipa_analyze_node (struct cgraph_node
*node
)
2594 struct ipa_func_body_info fbi
;
2595 class ipa_node_params
*info
;
2597 ipa_check_create_node_params ();
2598 ipa_check_create_edge_args ();
2599 info
= IPA_NODE_REF (node
);
2601 if (info
->analysis_done
)
2603 info
->analysis_done
= 1;
2605 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2607 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2609 ipa_set_param_used (info
, i
, true);
2610 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2615 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
2617 calculate_dominance_info (CDI_DOMINATORS
);
2618 ipa_initialize_node_params (node
);
2619 ipa_analyze_controlled_uses (node
);
2622 fbi
.info
= IPA_NODE_REF (node
);
2623 fbi
.bb_infos
= vNULL
;
2624 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
2625 fbi
.param_count
= ipa_get_param_count (info
);
2626 fbi
.aa_walk_budget
= PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS
);
2628 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
2630 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2631 bi
->cg_edges
.safe_push (cs
);
2634 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
2636 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
2637 bi
->cg_edges
.safe_push (cs
);
2640 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
2642 ipa_release_body_info (&fbi
);
2643 free_dominance_info (CDI_DOMINATORS
);
2647 /* Update the jump functions associated with call graph edge E when the call
2648 graph edge CS is being inlined, assuming that E->caller is already (possibly
2649 indirectly) inlined into CS->callee and that E has not been inlined. */
2652 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2653 struct cgraph_edge
*e
)
2655 class ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2656 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2657 int count
= ipa_get_cs_argument_count (args
);
2660 for (i
= 0; i
< count
; i
++)
2662 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2663 class ipa_polymorphic_call_context
*dst_ctx
2664 = ipa_get_ith_polymorhic_call_context (args
, i
);
2666 if (dst
->type
== IPA_JF_ANCESTOR
)
2668 struct ipa_jump_func
*src
;
2669 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2670 class ipa_polymorphic_call_context
*src_ctx
2671 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2673 /* Variable number of arguments can cause havoc if we try to access
2674 one that does not exist in the inlined edge. So make sure we
2676 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2678 ipa_set_jf_unknown (dst
);
2682 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2684 if (src_ctx
&& !src_ctx
->useless_p ())
2686 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2688 /* TODO: Make type preserved safe WRT contexts. */
2689 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2690 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2691 ctx
.offset_by (dst
->value
.ancestor
.offset
);
2692 if (!ctx
.useless_p ())
2696 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2698 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2701 dst_ctx
->combine_with (ctx
);
2706 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2708 struct ipa_agg_jf_item
*item
;
2711 /* Currently we do not produce clobber aggregate jump functions,
2712 replace with merging when we do. */
2713 gcc_assert (!dst
->agg
.items
);
2715 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2716 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2717 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2718 item
->offset
-= dst
->value
.ancestor
.offset
;
2721 if (src
->type
== IPA_JF_PASS_THROUGH
2722 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2724 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2725 dst
->value
.ancestor
.agg_preserved
&=
2726 src
->value
.pass_through
.agg_preserved
;
2728 else if (src
->type
== IPA_JF_PASS_THROUGH
2729 && TREE_CODE_CLASS (src
->value
.pass_through
.operation
) == tcc_unary
)
2731 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2732 dst
->value
.ancestor
.agg_preserved
= false;
2734 else if (src
->type
== IPA_JF_ANCESTOR
)
2736 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2737 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2738 dst
->value
.ancestor
.agg_preserved
&=
2739 src
->value
.ancestor
.agg_preserved
;
2742 ipa_set_jf_unknown (dst
);
2744 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2746 struct ipa_jump_func
*src
;
2747 /* We must check range due to calls with variable number of arguments
2748 and we cannot combine jump functions with operations. */
2749 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2750 && (dst
->value
.pass_through
.formal_id
2751 < ipa_get_cs_argument_count (top
)))
2753 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2754 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2755 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2756 class ipa_polymorphic_call_context
*src_ctx
2757 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
2759 if (src_ctx
&& !src_ctx
->useless_p ())
2761 class ipa_polymorphic_call_context ctx
= *src_ctx
;
2763 /* TODO: Make type preserved safe WRT contexts. */
2764 if (!ipa_get_jf_pass_through_type_preserved (dst
))
2765 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
2766 if (!ctx
.useless_p ())
2770 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
2772 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
2774 dst_ctx
->combine_with (ctx
);
2779 case IPA_JF_UNKNOWN
:
2780 ipa_set_jf_unknown (dst
);
2783 ipa_set_jf_cst_copy (dst
, src
);
2786 case IPA_JF_PASS_THROUGH
:
2788 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2789 enum tree_code operation
;
2790 operation
= ipa_get_jf_pass_through_operation (src
);
2792 if (operation
== NOP_EXPR
)
2796 && ipa_get_jf_pass_through_agg_preserved (src
);
2797 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
2799 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
2800 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
2803 tree operand
= ipa_get_jf_pass_through_operand (src
);
2804 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2809 case IPA_JF_ANCESTOR
:
2813 && ipa_get_jf_ancestor_agg_preserved (src
);
2814 ipa_set_ancestor_jf (dst
,
2815 ipa_get_jf_ancestor_offset (src
),
2816 ipa_get_jf_ancestor_formal_id (src
),
2825 && (dst_agg_p
|| !src
->agg
.by_ref
))
2827 /* Currently we do not produce clobber aggregate jump
2828 functions, replace with merging when we do. */
2829 gcc_assert (!dst
->agg
.items
);
2831 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2832 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2836 ipa_set_jf_unknown (dst
);
2841 /* If TARGET is an addr_expr of a function declaration, make it the
2842 (SPECULATIVE)destination of an indirect edge IE and return the edge.
2843 Otherwise, return NULL. */
2845 struct cgraph_edge
*
2846 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
2849 struct cgraph_node
*callee
;
2850 bool unreachable
= false;
2852 if (TREE_CODE (target
) == ADDR_EXPR
)
2853 target
= TREE_OPERAND (target
, 0);
2854 if (TREE_CODE (target
) != FUNCTION_DECL
)
2856 target
= canonicalize_constructor_val (target
, NULL
);
2857 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2859 /* Member pointer call that goes through a VMT lookup. */
2860 if (ie
->indirect_info
->member_ptr
2861 /* Or if target is not an invariant expression and we do not
2862 know if it will evaulate to function at runtime.
2863 This can happen when folding through &VAR, where &VAR
2864 is IP invariant, but VAR itself is not.
2866 TODO: Revisit this when GCC 5 is branched. It seems that
2867 member_ptr check is not needed and that we may try to fold
2868 the expression and see if VAR is readonly. */
2869 || !is_gimple_ip_invariant (target
))
2871 if (dump_enabled_p ())
2873 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2874 "discovered direct call non-invariant %s\n",
2875 ie
->caller
->dump_name ());
2881 if (dump_enabled_p ())
2883 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2884 "discovered direct call to non-function in %s, "
2885 "making it __builtin_unreachable\n",
2886 ie
->caller
->dump_name ());
2889 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2890 callee
= cgraph_node::get_create (target
);
2894 callee
= cgraph_node::get (target
);
2897 callee
= cgraph_node::get (target
);
2899 /* Because may-edges are not explicitely represented and vtable may be external,
2900 we may create the first reference to the object in the unit. */
2901 if (!callee
|| callee
->global
.inlined_to
)
2904 /* We are better to ensure we can refer to it.
2905 In the case of static functions we are out of luck, since we already
2906 removed its body. In the case of public functions we may or may
2907 not introduce the reference. */
2908 if (!canonicalize_constructor_val (target
, NULL
)
2909 || !TREE_PUBLIC (target
))
2912 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2913 "(%s -> %s) but cannot refer to it. Giving up.\n",
2914 ie
->caller
->dump_name (),
2915 ie
->callee
->dump_name ());
2918 callee
= cgraph_node::get_create (target
);
2921 /* If the edge is already speculated. */
2922 if (speculative
&& ie
->speculative
)
2924 struct cgraph_edge
*e2
;
2925 struct ipa_ref
*ref
;
2926 ie
->speculative_call_info (e2
, ie
, ref
);
2927 if (e2
->callee
->ultimate_alias_target ()
2928 != callee
->ultimate_alias_target ())
2931 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative "
2932 "target (%s -> %s) but the call is already "
2933 "speculated to %s. Giving up.\n",
2934 ie
->caller
->dump_name (), callee
->dump_name (),
2935 e2
->callee
->dump_name ());
2940 fprintf (dump_file
, "ipa-prop: Discovered call to a speculative target "
2941 "(%s -> %s) this agree with previous speculation.\n",
2942 ie
->caller
->dump_name (), callee
->dump_name ());
2947 if (!dbg_cnt (devirt
))
2950 ipa_check_create_node_params ();
2952 /* We cannot make edges to inline clones. It is bug that someone removed
2953 the cgraph node too early. */
2954 gcc_assert (!callee
->global
.inlined_to
);
2956 if (dump_file
&& !unreachable
)
2958 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
2959 "(%s -> %s), for stmt ",
2960 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2961 speculative
? "speculative" : "known",
2962 ie
->caller
->dump_name (),
2963 callee
->dump_name ());
2965 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2967 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2969 if (dump_enabled_p ())
2971 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
2972 "converting indirect call in %s to direct call to %s\n",
2973 ie
->caller
->name (), callee
->name ());
2977 struct cgraph_edge
*orig
= ie
;
2978 ie
= ie
->make_direct (callee
);
2979 /* If we resolved speculative edge the cost is already up to date
2980 for direct call (adjusted by inline_edge_duplication_hook). */
2983 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
2984 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2985 - eni_size_weights
.call_cost
);
2986 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2987 - eni_time_weights
.call_cost
);
2992 if (!callee
->can_be_discarded_p ())
2995 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
2999 /* make_speculative will update ie's cost to direct call cost. */
3000 ie
= ie
->make_speculative
3001 (callee
, ie
->count
.apply_scale (8, 10));
3007 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3008 CONSTRUCTOR and return it. Return NULL if the search fails for some
3012 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3014 tree type
= TREE_TYPE (constructor
);
3015 if (TREE_CODE (type
) != ARRAY_TYPE
3016 && TREE_CODE (type
) != RECORD_TYPE
)
3021 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3023 HOST_WIDE_INT elt_offset
;
3024 if (TREE_CODE (type
) == ARRAY_TYPE
)
3027 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3028 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3032 if (TREE_CODE (index
) == RANGE_EXPR
)
3033 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3035 off
= wi::to_offset (index
);
3036 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3038 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3039 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3040 off
= wi::sext (off
- wi::to_offset (low_bound
),
3041 TYPE_PRECISION (TREE_TYPE (index
)));
3043 off
*= wi::to_offset (unit_size
);
3044 /* ??? Handle more than just the first index of a
3048 off
= wi::to_offset (unit_size
) * ix
;
3050 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3051 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3053 elt_offset
= off
.to_shwi ();
3055 else if (TREE_CODE (type
) == RECORD_TYPE
)
3057 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3058 if (DECL_BIT_FIELD (index
))
3060 elt_offset
= int_bit_position (index
);
3065 if (elt_offset
> req_offset
)
3068 if (TREE_CODE (val
) == CONSTRUCTOR
)
3069 return find_constructor_constant_at_offset (val
,
3070 req_offset
- elt_offset
);
3072 if (elt_offset
== req_offset
3073 && is_gimple_reg_type (TREE_TYPE (val
))
3074 && is_gimple_ip_invariant (val
))
3080 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3081 invariant from a static constructor and if so, return it. Otherwise return
3085 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3089 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3091 scalar
= TREE_OPERAND (scalar
, 0);
3095 || !is_global_var (scalar
)
3096 || !TREE_READONLY (scalar
)
3097 || !DECL_INITIAL (scalar
)
3098 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3101 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3104 /* Retrieve value from aggregate jump function AGG or static initializer of
3105 SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3106 none. BY_REF specifies whether the value has to be passed by reference or
3107 by value. If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3108 to is set to true if the value comes from an initializer of a constant. */
3111 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
, tree scalar
,
3112 HOST_WIDE_INT offset
, bool by_ref
,
3113 bool *from_global_constant
)
3115 struct ipa_agg_jf_item
*item
;
3120 tree res
= ipa_find_agg_cst_from_init (scalar
, offset
, by_ref
);
3123 if (from_global_constant
)
3124 *from_global_constant
= true;
3130 || by_ref
!= agg
->by_ref
)
3133 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
3134 if (item
->offset
== offset
)
3136 /* Currently we do not have clobber values, return NULL for them once
3138 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
3139 if (from_global_constant
)
3140 *from_global_constant
= false;
3146 /* Remove a reference to SYMBOL from the list of references of a node given by
3147 reference description RDESC. Return true if the reference has been
3148 successfully found and removed. */
3151 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
3153 struct ipa_ref
*to_del
;
3154 struct cgraph_edge
*origin
;
3159 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
3160 origin
->lto_stmt_uid
);
3164 to_del
->remove_reference ();
3166 fprintf (dump_file
, "ipa-prop: Removed a reference from %s to %s.\n",
3167 origin
->caller
->dump_name (), xstrdup_for_dump (symbol
->name ()));
3171 /* If JFUNC has a reference description with refcount different from
3172 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3173 NULL. JFUNC must be a constant jump function. */
3175 static struct ipa_cst_ref_desc
*
3176 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
3178 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
3179 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
3185 /* If the value of constant jump function JFUNC is an address of a function
3186 declaration, return the associated call graph node. Otherwise return
3189 static cgraph_node
*
3190 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
3192 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
3193 tree cst
= ipa_get_jf_constant (jfunc
);
3194 if (TREE_CODE (cst
) != ADDR_EXPR
3195 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
3198 return cgraph_node::get (TREE_OPERAND (cst
, 0));
3202 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3203 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3204 the edge specified in the rdesc. Return false if either the symbol or the
3205 reference could not be found, otherwise return true. */
3208 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
3210 struct ipa_cst_ref_desc
*rdesc
;
3211 if (jfunc
->type
== IPA_JF_CONST
3212 && (rdesc
= jfunc_rdesc_usable (jfunc
))
3213 && --rdesc
->refcount
== 0)
3215 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
3219 return remove_described_reference (symbol
, rdesc
);
3224 /* Try to find a destination for indirect edge IE that corresponds to a simple
3225 call or a call of a member function pointer and where the destination is a
3226 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3227 the type of the parameter to which the result of JFUNC is passed. If it can
3228 be determined, return the newly direct edge, otherwise return NULL.
3229 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3231 static struct cgraph_edge
*
3232 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3233 struct ipa_jump_func
*jfunc
, tree target_type
,
3234 class ipa_node_params
*new_root_info
)
3236 struct cgraph_edge
*cs
;
3238 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3239 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3242 bool from_global_constant
;
3243 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
, scalar
,
3244 ie
->indirect_info
->offset
,
3245 ie
->indirect_info
->by_ref
,
3246 &from_global_constant
);
3248 && !from_global_constant
3249 && !ie
->indirect_info
->guaranteed_unmodified
)
3256 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3258 if (cs
&& !agg_contents
)
3261 gcc_checking_assert (cs
->callee
3263 || jfunc
->type
!= IPA_JF_CONST
3264 || !cgraph_node_for_jfunc (jfunc
)
3265 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
3266 ok
= try_decrement_rdesc_refcount (jfunc
);
3267 gcc_checking_assert (ok
);
3273 /* Return the target to be used in cases of impossible devirtualization. IE
3274 and target (the latter can be NULL) are dumped when dumping is enabled. */
3277 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3283 "Type inconsistent devirtualization: %s->%s\n",
3284 ie
->caller
->dump_name (),
3285 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3288 "No devirtualization target in %s\n",
3289 ie
->caller
->dump_name ());
3291 tree new_target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3292 cgraph_node::get_create (new_target
);
3296 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3297 call based on a formal parameter which is described by jump function JFUNC
3298 and if it can be determined, make it direct and return the direct edge.
3299 Otherwise, return NULL. CTX describes the polymorphic context that the
3300 parameter the call is based on brings along with it. */
3302 static struct cgraph_edge
*
3303 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3304 struct ipa_jump_func
*jfunc
,
3305 class ipa_polymorphic_call_context ctx
)
3308 bool speculative
= false;
3310 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3313 gcc_assert (!ie
->indirect_info
->by_ref
);
3315 /* Try to do lookup via known virtual table pointer value. */
3316 if (!ie
->indirect_info
->vptr_changed
3317 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3320 unsigned HOST_WIDE_INT offset
;
3321 tree scalar
= (jfunc
->type
== IPA_JF_CONST
) ? ipa_get_jf_constant (jfunc
)
3323 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
, scalar
,
3324 ie
->indirect_info
->offset
,
3326 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3329 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3330 vtable
, offset
, &can_refer
);
3334 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
)
3335 || !possible_polymorphic_call_target_p
3336 (ie
, cgraph_node::get (t
)))
3338 /* Do not speculate builtin_unreachable, it is stupid! */
3339 if (!ie
->indirect_info
->vptr_changed
)
3340 target
= ipa_impossible_devirt_target (ie
, target
);
3347 speculative
= ie
->indirect_info
->vptr_changed
;
3353 ipa_polymorphic_call_context
ie_context (ie
);
3354 vec
<cgraph_node
*>targets
;
3357 ctx
.offset_by (ie
->indirect_info
->offset
);
3358 if (ie
->indirect_info
->vptr_changed
)
3359 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3360 ie
->indirect_info
->otr_type
);
3361 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3362 targets
= possible_polymorphic_call_targets
3363 (ie
->indirect_info
->otr_type
,
3364 ie
->indirect_info
->otr_token
,
3366 if (final
&& targets
.length () <= 1)
3368 speculative
= false;
3369 if (targets
.length () == 1)
3370 target
= targets
[0]->decl
;
3372 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3374 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3375 && !ie
->speculative
&& ie
->maybe_hot_p ())
3378 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3379 ie
->indirect_info
->otr_token
,
3380 ie
->indirect_info
->context
);
3390 if (!possible_polymorphic_call_target_p
3391 (ie
, cgraph_node::get_create (target
)))
3395 target
= ipa_impossible_devirt_target (ie
, target
);
3397 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3403 /* Update the param called notes associated with NODE when CS is being inlined,
3404 assuming NODE is (potentially indirectly) inlined into CS->callee.
3405 Moreover, if the callee is discovered to be constant, create a new cgraph
3406 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3407 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3410 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3411 struct cgraph_node
*node
,
3412 vec
<cgraph_edge
*> *new_edges
)
3414 class ipa_edge_args
*top
;
3415 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3416 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3419 ipa_check_create_edge_args ();
3420 top
= IPA_EDGE_REF (cs
);
3421 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
3422 ? cs
->caller
->global
.inlined_to
3424 inlined_node_info
= IPA_NODE_REF (cs
->callee
->function_symbol ());
3426 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3428 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3429 struct ipa_jump_func
*jfunc
;
3431 cgraph_node
*spec_target
= NULL
;
3433 next_ie
= ie
->next_callee
;
3435 if (ici
->param_index
== -1)
3438 /* We must check range due to calls with variable number of arguments: */
3439 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
3441 ici
->param_index
= -1;
3445 param_index
= ici
->param_index
;
3446 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3448 if (ie
->speculative
)
3450 struct cgraph_edge
*de
;
3451 struct ipa_ref
*ref
;
3452 ie
->speculative_call_info (de
, ie
, ref
);
3453 spec_target
= de
->callee
;
3456 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3457 new_direct_edge
= NULL
;
3458 else if (ici
->polymorphic
)
3460 ipa_polymorphic_call_context ctx
;
3461 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3462 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
);
3466 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3467 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3472 /* If speculation was removed, then we need to do nothing. */
3473 if (new_direct_edge
&& new_direct_edge
!= ie
3474 && new_direct_edge
->callee
== spec_target
)
3476 new_direct_edge
->indirect_inlining_edge
= 1;
3477 top
= IPA_EDGE_REF (cs
);
3479 if (!new_direct_edge
->speculative
)
3482 else if (new_direct_edge
)
3484 new_direct_edge
->indirect_inlining_edge
= 1;
3485 if (new_direct_edge
->call_stmt
)
3486 new_direct_edge
->call_stmt_cannot_inline_p
3487 = !gimple_check_call_matching_types (
3488 new_direct_edge
->call_stmt
,
3489 new_direct_edge
->callee
->decl
, false);
3492 new_edges
->safe_push (new_direct_edge
);
3495 top
= IPA_EDGE_REF (cs
);
3496 /* If speculative edge was introduced we still need to update
3497 call info of the indirect edge. */
3498 if (!new_direct_edge
->speculative
)
3501 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3502 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3504 if (ici
->agg_contents
3505 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3506 && !ici
->polymorphic
)
3507 ici
->param_index
= -1;
3510 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3511 if (ici
->polymorphic
3512 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3513 ici
->vptr_changed
= true;
3516 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3518 if (ici
->agg_contents
3519 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3520 && !ici
->polymorphic
)
3521 ici
->param_index
= -1;
3524 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3525 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3526 if (ici
->polymorphic
3527 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3528 ici
->vptr_changed
= true;
3532 /* Either we can find a destination for this edge now or never. */
3533 ici
->param_index
= -1;
3539 /* Recursively traverse subtree of NODE (including node) made of inlined
3540 cgraph_edges when CS has been inlined and invoke
3541 update_indirect_edges_after_inlining on all nodes and
3542 update_jump_functions_after_inlining on all non-inlined edges that lead out
3543 of this subtree. Newly discovered indirect edges will be added to
3544 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3548 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
3549 struct cgraph_node
*node
,
3550 vec
<cgraph_edge
*> *new_edges
)
3552 struct cgraph_edge
*e
;
3555 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
3557 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3558 if (!e
->inline_failed
)
3559 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
3561 update_jump_functions_after_inlining (cs
, e
);
3562 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3563 update_jump_functions_after_inlining (cs
, e
);
3568 /* Combine two controlled uses counts as done during inlining. */
3571 combine_controlled_uses_counters (int c
, int d
)
3573 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
3574 return IPA_UNDESCRIBED_USE
;
3579 /* Propagate number of controlled users from CS->caleee to the new root of the
3580 tree of inlined nodes. */
3583 propagate_controlled_uses (struct cgraph_edge
*cs
)
3585 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
3586 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
3587 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
3588 class ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
3589 class ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
3592 count
= MIN (ipa_get_cs_argument_count (args
),
3593 ipa_get_param_count (old_root_info
));
3594 for (i
= 0; i
< count
; i
++)
3596 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3597 struct ipa_cst_ref_desc
*rdesc
;
3599 if (jf
->type
== IPA_JF_PASS_THROUGH
)
3602 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
3603 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
3604 d
= ipa_get_controlled_uses (old_root_info
, i
);
3606 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
3607 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
3608 c
= combine_controlled_uses_counters (c
, d
);
3609 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
3610 if (c
== 0 && new_root_info
->ipcp_orig_node
)
3612 struct cgraph_node
*n
;
3613 struct ipa_ref
*ref
;
3614 tree t
= new_root_info
->known_csts
[src_idx
];
3616 if (t
&& TREE_CODE (t
) == ADDR_EXPR
3617 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
3618 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
3619 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
3622 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
3623 "reference from %s to %s.\n",
3624 new_root
->dump_name (),
3626 ref
->remove_reference ();
3630 else if (jf
->type
== IPA_JF_CONST
3631 && (rdesc
= jfunc_rdesc_usable (jf
)))
3633 int d
= ipa_get_controlled_uses (old_root_info
, i
);
3634 int c
= rdesc
->refcount
;
3635 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
3636 if (rdesc
->refcount
== 0)
3638 tree cst
= ipa_get_jf_constant (jf
);
3639 struct cgraph_node
*n
;
3640 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
3641 && TREE_CODE (TREE_OPERAND (cst
, 0))
3643 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
3646 struct cgraph_node
*clone
;
3648 ok
= remove_described_reference (n
, rdesc
);
3649 gcc_checking_assert (ok
);
3652 while (clone
->global
.inlined_to
3653 && clone
!= rdesc
->cs
->caller
3654 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3656 struct ipa_ref
*ref
;
3657 ref
= clone
->find_reference (n
, NULL
, 0);
3661 fprintf (dump_file
, "ipa-prop: Removing "
3662 "cloning-created reference "
3664 clone
->dump_name (),
3666 ref
->remove_reference ();
3668 clone
= clone
->callers
->caller
;
3675 for (i
= ipa_get_param_count (old_root_info
);
3676 i
< ipa_get_cs_argument_count (args
);
3679 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3681 if (jf
->type
== IPA_JF_CONST
)
3683 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3685 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3687 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3688 ipa_set_controlled_uses (new_root_info
,
3689 jf
->value
.pass_through
.formal_id
,
3690 IPA_UNDESCRIBED_USE
);
3694 /* Update jump functions and call note functions on inlining the call site CS.
3695 CS is expected to lead to a node already cloned by
3696 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3697 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3701 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3702 vec
<cgraph_edge
*> *new_edges
)
3705 /* Do nothing if the preparation phase has not been carried out yet
3706 (i.e. during early inlining). */
3707 if (!ipa_node_params_sum
)
3709 gcc_assert (ipa_edge_args_sum
);
3711 propagate_controlled_uses (cs
);
3712 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3717 /* Ensure that array of edge arguments infos is big enough to accommodate a
3718 structure for all edges and reallocates it if not. Also, allocate
3719 associated hash tables is they do not already exist. */
3722 ipa_check_create_edge_args (void)
3724 if (!ipa_edge_args_sum
)
3726 = (new (ggc_cleared_alloc
<ipa_edge_args_sum_t
> ())
3727 ipa_edge_args_sum_t (symtab
, true));
3728 if (!ipa_bits_hash_table
)
3729 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
3730 if (!ipa_vr_hash_table
)
3731 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
3734 /* Free all ipa_edge structures. */
3737 ipa_free_all_edge_args (void)
3739 if (!ipa_edge_args_sum
)
3742 ipa_edge_args_sum
->release ();
3743 ipa_edge_args_sum
= NULL
;
3746 /* Free all ipa_node_params structures. */
3749 ipa_free_all_node_params (void)
3751 ipa_node_params_sum
->release ();
3752 ipa_node_params_sum
= NULL
;
3755 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3756 tables if they do not already exist. */
3759 ipcp_transformation_initialize (void)
3761 if (!ipa_bits_hash_table
)
3762 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
3763 if (!ipa_vr_hash_table
)
3764 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
3765 if (ipcp_transformation_sum
== NULL
)
3766 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
3769 /* Set the aggregate replacements of NODE to be AGGVALS. */
3772 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3773 struct ipa_agg_replacement_value
*aggvals
)
3775 ipcp_transformation_initialize ();
3776 ipcp_transformation
*s
= ipcp_transformation_sum
->get_create (node
);
3777 s
->agg_values
= aggvals
;
3780 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
3781 count data structures accordingly. */
3784 ipa_edge_args_sum_t::remove (cgraph_edge
*cs
, ipa_edge_args
*args
)
3786 if (args
->jump_functions
)
3788 struct ipa_jump_func
*jf
;
3790 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3792 struct ipa_cst_ref_desc
*rdesc
;
3793 try_decrement_rdesc_refcount (jf
);
3794 if (jf
->type
== IPA_JF_CONST
3795 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3802 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
3803 reference count data strucutres accordingly. */
3806 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
3807 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
3811 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3812 if (old_args
->polymorphic_call_contexts
)
3813 new_args
->polymorphic_call_contexts
3814 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
3816 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3818 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3819 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3821 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3823 if (src_jf
->type
== IPA_JF_CONST
)
3825 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3828 dst_jf
->value
.constant
.rdesc
= NULL
;
3829 else if (src
->caller
== dst
->caller
)
3831 struct ipa_ref
*ref
;
3832 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3833 gcc_checking_assert (n
);
3834 ref
= src
->caller
->find_reference (n
, src
->call_stmt
,
3836 gcc_checking_assert (ref
);
3837 dst
->caller
->clone_reference (ref
, ref
->stmt
);
3839 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3840 dst_rdesc
->cs
= dst
;
3841 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3842 dst_rdesc
->next_duplicate
= NULL
;
3843 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3845 else if (src_rdesc
->cs
== src
)
3847 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
3848 dst_rdesc
->cs
= dst
;
3849 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3850 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3851 src_rdesc
->next_duplicate
= dst_rdesc
;
3852 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3856 struct ipa_cst_ref_desc
*dst_rdesc
;
3857 /* This can happen during inlining, when a JFUNC can refer to a
3858 reference taken in a function up in the tree of inline clones.
3859 We need to find the duplicate that refers to our tree of
3862 gcc_assert (dst
->caller
->global
.inlined_to
);
3863 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3865 dst_rdesc
= dst_rdesc
->next_duplicate
)
3867 struct cgraph_node
*top
;
3868 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3869 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3870 : dst_rdesc
->cs
->caller
;
3871 if (dst
->caller
->global
.inlined_to
== top
)
3874 gcc_assert (dst_rdesc
);
3875 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3878 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
3879 && src
->caller
== dst
->caller
)
3881 struct cgraph_node
*inline_root
= dst
->caller
->global
.inlined_to
3882 ? dst
->caller
->global
.inlined_to
: dst
->caller
;
3883 class ipa_node_params
*root_info
= IPA_NODE_REF (inline_root
);
3884 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
3886 int c
= ipa_get_controlled_uses (root_info
, idx
);
3887 if (c
!= IPA_UNDESCRIBED_USE
)
3890 ipa_set_controlled_uses (root_info
, idx
, c
);
3896 /* Analyze newly added function into callgraph. */
3899 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3901 if (node
->has_gimple_body_p ())
3902 ipa_analyze_node (node
);
3905 /* Hook that is called by summary when a node is duplicated. */
3908 ipa_node_params_t::duplicate(cgraph_node
*src
, cgraph_node
*dst
,
3909 ipa_node_params
*old_info
,
3910 ipa_node_params
*new_info
)
3912 ipa_agg_replacement_value
*old_av
, *new_av
;
3914 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
3915 new_info
->lattices
= NULL
;
3916 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3917 new_info
->known_csts
= old_info
->known_csts
.copy ();
3918 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
3920 new_info
->analysis_done
= old_info
->analysis_done
;
3921 new_info
->node_enqueued
= old_info
->node_enqueued
;
3922 new_info
->versionable
= old_info
->versionable
;
3924 old_av
= ipa_get_agg_replacements_for_node (src
);
3930 struct ipa_agg_replacement_value
*v
;
3932 v
= ggc_alloc
<ipa_agg_replacement_value
> ();
3933 memcpy (v
, old_av
, sizeof (*v
));
3936 old_av
= old_av
->next
;
3938 ipa_set_node_agg_value_chain (dst
, new_av
);
3941 ipcp_transformation
*src_trans
= ipcp_get_transformation_summary (src
);
3945 ipcp_transformation_initialize ();
3946 src_trans
= ipcp_transformation_sum
->get_create (src
);
3947 ipcp_transformation
*dst_trans
3948 = ipcp_transformation_sum
->get_create (dst
);
3950 dst_trans
->bits
= vec_safe_copy (src_trans
->bits
);
3952 const vec
<ipa_vr
, va_gc
> *src_vr
= src_trans
->m_vr
;
3953 vec
<ipa_vr
, va_gc
> *&dst_vr
3954 = ipcp_get_transformation_summary (dst
)->m_vr
;
3955 if (vec_safe_length (src_trans
->m_vr
) > 0)
3957 vec_safe_reserve_exact (dst_vr
, src_vr
->length ());
3958 for (unsigned i
= 0; i
< src_vr
->length (); ++i
)
3959 dst_vr
->quick_push ((*src_vr
)[i
]);
3964 /* Register our cgraph hooks if they are not already there. */
3967 ipa_register_cgraph_hooks (void)
3969 ipa_check_create_node_params ();
3970 ipa_check_create_edge_args ();
3972 function_insertion_hook_holder
=
3973 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
3976 /* Unregister our cgraph hooks if they are not already there. */
3979 ipa_unregister_cgraph_hooks (void)
3981 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
3982 function_insertion_hook_holder
= NULL
;
3985 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3986 longer needed after ipa-cp. */
3989 ipa_free_all_structures_after_ipa_cp (void)
3991 if (!optimize
&& !in_lto_p
)
3993 ipa_free_all_edge_args ();
3994 ipa_free_all_node_params ();
3995 ipcp_sources_pool
.release ();
3996 ipcp_cst_values_pool
.release ();
3997 ipcp_poly_ctx_values_pool
.release ();
3998 ipcp_agg_lattice_pool
.release ();
3999 ipa_unregister_cgraph_hooks ();
4000 ipa_refdesc_pool
.release ();
4004 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4005 longer needed after indirect inlining. */
4008 ipa_free_all_structures_after_iinln (void)
4010 ipa_free_all_edge_args ();
4011 ipa_free_all_node_params ();
4012 ipa_unregister_cgraph_hooks ();
4013 ipcp_sources_pool
.release ();
4014 ipcp_cst_values_pool
.release ();
4015 ipcp_poly_ctx_values_pool
.release ();
4016 ipcp_agg_lattice_pool
.release ();
4017 ipa_refdesc_pool
.release ();
4020 /* Print ipa_tree_map data structures of all functions in the
4024 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
4027 class ipa_node_params
*info
;
4029 if (!node
->definition
)
4031 info
= IPA_NODE_REF (node
);
4032 fprintf (f
, " function %s parameter descriptors:\n", node
->dump_name ());
4033 count
= ipa_get_param_count (info
);
4034 for (i
= 0; i
< count
; i
++)
4039 ipa_dump_param (f
, info
, i
);
4040 if (ipa_is_param_used (info
, i
))
4041 fprintf (f
, " used");
4042 c
= ipa_get_controlled_uses (info
, i
);
4043 if (c
== IPA_UNDESCRIBED_USE
)
4044 fprintf (f
, " undescribed_use");
4046 fprintf (f
, " controlled_uses=%i", c
);
4051 /* Print ipa_tree_map data structures of all functions in the
4055 ipa_print_all_params (FILE * f
)
4057 struct cgraph_node
*node
;
4059 fprintf (f
, "\nFunction parameters:\n");
4060 FOR_EACH_FUNCTION (node
)
4061 ipa_print_node_params (f
, node
);
4064 /* Dump the AV linked list. */
4067 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4070 fprintf (f
, " Aggregate replacements:");
4071 for (; av
; av
= av
->next
)
4073 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4074 av
->index
, av
->offset
);
4075 print_generic_expr (f
, av
->value
);
4081 /* Stream out jump function JUMP_FUNC to OB. */
4084 ipa_write_jump_function (struct output_block
*ob
,
4085 struct ipa_jump_func
*jump_func
)
4087 struct ipa_agg_jf_item
*item
;
4088 struct bitpack_d bp
;
4092 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4093 as well as WPA memory by handling them specially. */
4094 if (jump_func
->type
== IPA_JF_CONST
4095 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4098 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4099 switch (jump_func
->type
)
4101 case IPA_JF_UNKNOWN
:
4105 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4106 stream_write_tree (ob
,
4108 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4109 : jump_func
->value
.constant
.value
, true);
4111 case IPA_JF_PASS_THROUGH
:
4112 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4113 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4115 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4116 bp
= bitpack_create (ob
->main_stream
);
4117 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4118 streamer_write_bitpack (&bp
);
4120 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4122 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4125 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4126 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4129 case IPA_JF_ANCESTOR
:
4130 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4131 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4132 bp
= bitpack_create (ob
->main_stream
);
4133 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4134 streamer_write_bitpack (&bp
);
4138 count
= vec_safe_length (jump_func
->agg
.items
);
4139 streamer_write_uhwi (ob
, count
);
4142 bp
= bitpack_create (ob
->main_stream
);
4143 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4144 streamer_write_bitpack (&bp
);
4147 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4149 streamer_write_uhwi (ob
, item
->offset
);
4150 stream_write_tree (ob
, item
->value
, true);
4153 bp
= bitpack_create (ob
->main_stream
);
4154 bp_pack_value (&bp
, !!jump_func
->bits
, 1);
4155 streamer_write_bitpack (&bp
);
4156 if (jump_func
->bits
)
4158 streamer_write_widest_int (ob
, jump_func
->bits
->value
);
4159 streamer_write_widest_int (ob
, jump_func
->bits
->mask
);
4161 bp_pack_value (&bp
, !!jump_func
->m_vr
, 1);
4162 streamer_write_bitpack (&bp
);
4163 if (jump_func
->m_vr
)
4165 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4166 VR_LAST
, jump_func
->m_vr
->kind ());
4167 stream_write_tree (ob
, jump_func
->m_vr
->min (), true);
4168 stream_write_tree (ob
, jump_func
->m_vr
->max (), true);
4172 /* Read in jump function JUMP_FUNC from IB. */
4175 ipa_read_jump_function (class lto_input_block
*ib
,
4176 struct ipa_jump_func
*jump_func
,
4177 struct cgraph_edge
*cs
,
4178 class data_in
*data_in
,
4181 enum jump_func_type jftype
;
4182 enum tree_code operation
;
4184 int val
= streamer_read_uhwi (ib
);
4185 bool flag
= val
& 1;
4187 jftype
= (enum jump_func_type
) (val
/ 2);
4190 case IPA_JF_UNKNOWN
:
4191 ipa_set_jf_unknown (jump_func
);
4195 tree t
= stream_read_tree (ib
, data_in
);
4196 if (flag
&& prevails
)
4197 t
= build_fold_addr_expr (t
);
4198 ipa_set_jf_constant (jump_func
, t
, cs
);
4201 case IPA_JF_PASS_THROUGH
:
4202 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4203 if (operation
== NOP_EXPR
)
4205 int formal_id
= streamer_read_uhwi (ib
);
4206 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4207 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4208 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4210 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4212 int formal_id
= streamer_read_uhwi (ib
);
4213 ipa_set_jf_unary_pass_through (jump_func
, formal_id
, operation
);
4217 tree operand
= stream_read_tree (ib
, data_in
);
4218 int formal_id
= streamer_read_uhwi (ib
);
4219 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4223 case IPA_JF_ANCESTOR
:
4225 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4226 int formal_id
= streamer_read_uhwi (ib
);
4227 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4228 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4229 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
);
4233 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4236 count
= streamer_read_uhwi (ib
);
4238 vec_alloc (jump_func
->agg
.items
, count
);
4241 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4242 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4244 for (i
= 0; i
< count
; i
++)
4246 struct ipa_agg_jf_item item
;
4247 item
.offset
= streamer_read_uhwi (ib
);
4248 item
.value
= stream_read_tree (ib
, data_in
);
4250 jump_func
->agg
.items
->quick_push (item
);
4253 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4254 bool bits_known
= bp_unpack_value (&bp
, 1);
4257 widest_int value
= streamer_read_widest_int (ib
);
4258 widest_int mask
= streamer_read_widest_int (ib
);
4260 ipa_set_jfunc_bits (jump_func
, value
, mask
);
4263 jump_func
->bits
= NULL
;
4265 struct bitpack_d vr_bp
= streamer_read_bitpack (ib
);
4266 bool vr_known
= bp_unpack_value (&vr_bp
, 1);
4269 enum value_range_kind type
= streamer_read_enum (ib
, value_range_kind
,
4271 tree min
= stream_read_tree (ib
, data_in
);
4272 tree max
= stream_read_tree (ib
, data_in
);
4274 ipa_set_jfunc_vr (jump_func
, type
, min
, max
);
4277 jump_func
->m_vr
= NULL
;
4280 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4281 relevant to indirect inlining to OB. */
4284 ipa_write_indirect_edge_info (struct output_block
*ob
,
4285 struct cgraph_edge
*cs
)
4287 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4288 struct bitpack_d bp
;
4290 streamer_write_hwi (ob
, ii
->param_index
);
4291 bp
= bitpack_create (ob
->main_stream
);
4292 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4293 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4294 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4295 bp_pack_value (&bp
, ii
->by_ref
, 1);
4296 bp_pack_value (&bp
, ii
->guaranteed_unmodified
, 1);
4297 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4298 streamer_write_bitpack (&bp
);
4299 if (ii
->agg_contents
|| ii
->polymorphic
)
4300 streamer_write_hwi (ob
, ii
->offset
);
4302 gcc_assert (ii
->offset
== 0);
4304 if (ii
->polymorphic
)
4306 streamer_write_hwi (ob
, ii
->otr_token
);
4307 stream_write_tree (ob
, ii
->otr_type
, true);
4308 ii
->context
.stream_out (ob
);
4312 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4313 relevant to indirect inlining from IB. */
4316 ipa_read_indirect_edge_info (class lto_input_block
*ib
,
4317 class data_in
*data_in
,
4318 struct cgraph_edge
*cs
)
4320 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4321 struct bitpack_d bp
;
4323 ii
->param_index
= (int) streamer_read_hwi (ib
);
4324 bp
= streamer_read_bitpack (ib
);
4325 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4326 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4327 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4328 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4329 ii
->guaranteed_unmodified
= bp_unpack_value (&bp
, 1);
4330 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4331 if (ii
->agg_contents
|| ii
->polymorphic
)
4332 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4335 if (ii
->polymorphic
)
4337 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4338 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4339 ii
->context
.stream_in (ib
, data_in
);
4343 /* Stream out NODE info to OB. */
4346 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4349 lto_symtab_encoder_t encoder
;
4350 class ipa_node_params
*info
= IPA_NODE_REF (node
);
4352 struct cgraph_edge
*e
;
4353 struct bitpack_d bp
;
4355 encoder
= ob
->decl_state
->symtab_node_encoder
;
4356 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4357 streamer_write_uhwi (ob
, node_ref
);
4359 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4360 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4361 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4362 bp
= bitpack_create (ob
->main_stream
);
4363 gcc_assert (info
->analysis_done
4364 || ipa_get_param_count (info
) == 0);
4365 gcc_assert (!info
->node_enqueued
);
4366 gcc_assert (!info
->ipcp_orig_node
);
4367 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4368 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4369 streamer_write_bitpack (&bp
);
4370 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4372 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4373 stream_write_tree (ob
, ipa_get_type (info
, j
), true);
4375 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4377 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4379 streamer_write_uhwi (ob
,
4380 ipa_get_cs_argument_count (args
) * 2
4381 + (args
->polymorphic_call_contexts
!= NULL
));
4382 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4384 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4385 if (args
->polymorphic_call_contexts
!= NULL
)
4386 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4389 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4391 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4393 streamer_write_uhwi (ob
,
4394 ipa_get_cs_argument_count (args
) * 2
4395 + (args
->polymorphic_call_contexts
!= NULL
));
4396 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4398 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4399 if (args
->polymorphic_call_contexts
!= NULL
)
4400 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
4402 ipa_write_indirect_edge_info (ob
, e
);
4406 /* Stream in edge E from IB. */
4409 ipa_read_edge_info (class lto_input_block
*ib
,
4410 class data_in
*data_in
,
4411 struct cgraph_edge
*e
, bool prevails
)
4413 int count
= streamer_read_uhwi (ib
);
4414 bool contexts_computed
= count
& 1;
4419 if (prevails
&& e
->possibly_call_in_translation_unit_p ())
4421 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4422 vec_safe_grow_cleared (args
->jump_functions
, count
);
4423 if (contexts_computed
)
4424 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
);
4425 for (int k
= 0; k
< count
; k
++)
4427 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4429 if (contexts_computed
)
4430 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in
4436 for (int k
= 0; k
< count
; k
++)
4438 struct ipa_jump_func dummy
;
4439 ipa_read_jump_function (ib
, &dummy
, e
,
4441 if (contexts_computed
)
4443 class ipa_polymorphic_call_context ctx
;
4444 ctx
.stream_in (ib
, data_in
);
4450 /* Stream in NODE info from IB. */
4453 ipa_read_node_info (class lto_input_block
*ib
, struct cgraph_node
*node
,
4454 class data_in
*data_in
)
4457 struct cgraph_edge
*e
;
4458 struct bitpack_d bp
;
4459 bool prevails
= node
->prevailing_p ();
4460 class ipa_node_params
*info
= prevails
? IPA_NODE_REF (node
) : NULL
;
4462 int param_count
= streamer_read_uhwi (ib
);
4465 ipa_alloc_node_params (node
, param_count
);
4466 for (k
= 0; k
< param_count
; k
++)
4467 (*info
->descriptors
)[k
].move_cost
= streamer_read_uhwi (ib
);
4468 if (ipa_get_param_count (info
) != 0)
4469 info
->analysis_done
= true;
4470 info
->node_enqueued
= false;
4473 for (k
= 0; k
< param_count
; k
++)
4474 streamer_read_uhwi (ib
);
4476 bp
= streamer_read_bitpack (ib
);
4477 for (k
= 0; k
< param_count
; k
++)
4479 bool used
= bp_unpack_value (&bp
, 1);
4482 ipa_set_param_used (info
, k
, used
);
4484 for (k
= 0; k
< param_count
; k
++)
4486 int nuses
= streamer_read_hwi (ib
);
4487 tree type
= stream_read_tree (ib
, data_in
);
4491 ipa_set_controlled_uses (info
, k
, nuses
);
4492 (*info
->descriptors
)[k
].decl_or_type
= type
;
4495 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4496 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
4497 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4499 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
4500 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4504 /* Write jump functions for nodes in SET. */
4507 ipa_prop_write_jump_functions (void)
4509 struct cgraph_node
*node
;
4510 struct output_block
*ob
;
4511 unsigned int count
= 0;
4512 lto_symtab_encoder_iterator lsei
;
4513 lto_symtab_encoder_t encoder
;
4515 if (!ipa_node_params_sum
|| !ipa_edge_args_sum
)
4518 ob
= create_output_block (LTO_section_jump_functions
);
4519 encoder
= ob
->decl_state
->symtab_node_encoder
;
4521 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4522 lsei_next_function_in_partition (&lsei
))
4524 node
= lsei_cgraph_node (lsei
);
4525 if (node
->has_gimple_body_p ()
4526 && IPA_NODE_REF (node
) != NULL
)
4530 streamer_write_uhwi (ob
, count
);
4532 /* Process all of the functions. */
4533 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4534 lsei_next_function_in_partition (&lsei
))
4536 node
= lsei_cgraph_node (lsei
);
4537 if (node
->has_gimple_body_p ()
4538 && IPA_NODE_REF (node
) != NULL
)
4539 ipa_write_node_info (ob
, node
);
4541 streamer_write_char_stream (ob
->main_stream
, 0);
4542 produce_asm (ob
, NULL
);
4543 destroy_output_block (ob
);
4546 /* Read section in file FILE_DATA of length LEN with data DATA. */
4549 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4552 const struct lto_function_header
*header
=
4553 (const struct lto_function_header
*) data
;
4554 const int cfg_offset
= sizeof (struct lto_function_header
);
4555 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4556 const int string_offset
= main_offset
+ header
->main_size
;
4557 class data_in
*data_in
;
4561 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4562 header
->main_size
, file_data
->mode_table
);
4565 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4566 header
->string_size
, vNULL
);
4567 count
= streamer_read_uhwi (&ib_main
);
4569 for (i
= 0; i
< count
; i
++)
4572 struct cgraph_node
*node
;
4573 lto_symtab_encoder_t encoder
;
4575 index
= streamer_read_uhwi (&ib_main
);
4576 encoder
= file_data
->symtab_node_encoder
;
4577 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4579 gcc_assert (node
->definition
);
4580 ipa_read_node_info (&ib_main
, node
, data_in
);
4582 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4584 lto_data_in_delete (data_in
);
4587 /* Read ipcp jump functions. */
4590 ipa_prop_read_jump_functions (void)
4592 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4593 struct lto_file_decl_data
*file_data
;
4596 ipa_check_create_node_params ();
4597 ipa_check_create_edge_args ();
4598 ipa_register_cgraph_hooks ();
4600 while ((file_data
= file_data_vec
[j
++]))
4603 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4606 ipa_prop_read_section (file_data
, data
, len
);
4611 write_ipcp_transformation_info (output_block
*ob
, cgraph_node
*node
)
4614 unsigned int count
= 0;
4615 lto_symtab_encoder_t encoder
;
4616 struct ipa_agg_replacement_value
*aggvals
, *av
;
4618 aggvals
= ipa_get_agg_replacements_for_node (node
);
4619 encoder
= ob
->decl_state
->symtab_node_encoder
;
4620 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4621 streamer_write_uhwi (ob
, node_ref
);
4623 for (av
= aggvals
; av
; av
= av
->next
)
4625 streamer_write_uhwi (ob
, count
);
4627 for (av
= aggvals
; av
; av
= av
->next
)
4629 struct bitpack_d bp
;
4631 streamer_write_uhwi (ob
, av
->offset
);
4632 streamer_write_uhwi (ob
, av
->index
);
4633 stream_write_tree (ob
, av
->value
, true);
4635 bp
= bitpack_create (ob
->main_stream
);
4636 bp_pack_value (&bp
, av
->by_ref
, 1);
4637 streamer_write_bitpack (&bp
);
4640 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
4641 if (ts
&& vec_safe_length (ts
->m_vr
) > 0)
4643 count
= ts
->m_vr
->length ();
4644 streamer_write_uhwi (ob
, count
);
4645 for (unsigned i
= 0; i
< count
; ++i
)
4647 struct bitpack_d bp
;
4648 ipa_vr
*parm_vr
= &(*ts
->m_vr
)[i
];
4649 bp
= bitpack_create (ob
->main_stream
);
4650 bp_pack_value (&bp
, parm_vr
->known
, 1);
4651 streamer_write_bitpack (&bp
);
4654 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4655 VR_LAST
, parm_vr
->type
);
4656 streamer_write_wide_int (ob
, parm_vr
->min
);
4657 streamer_write_wide_int (ob
, parm_vr
->max
);
4662 streamer_write_uhwi (ob
, 0);
4664 if (ts
&& vec_safe_length (ts
->bits
) > 0)
4666 count
= ts
->bits
->length ();
4667 streamer_write_uhwi (ob
, count
);
4669 for (unsigned i
= 0; i
< count
; ++i
)
4671 const ipa_bits
*bits_jfunc
= (*ts
->bits
)[i
];
4672 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
4673 bp_pack_value (&bp
, !!bits_jfunc
, 1);
4674 streamer_write_bitpack (&bp
);
4677 streamer_write_widest_int (ob
, bits_jfunc
->value
);
4678 streamer_write_widest_int (ob
, bits_jfunc
->mask
);
4683 streamer_write_uhwi (ob
, 0);
4686 /* Stream in the aggregate value replacement chain for NODE from IB. */
/* NOTE(review): this view of the function has extraction gaps — several
   original lines (part of the signature, braces, loop-body guards and the
   trailing return) are missing; comments below describe only what is
   visible.  The function reads back, per node, the three sub-records
   written by the streaming-out counterpart: the aggregate replacement
   chain, the value-range (m_vr) vector and the known-bits vector.  */
4689 read_ipcp_transformation_info (lto_input_block
*ib
, cgraph_node
*node
,
/* Chain of aggregate constant replacements being rebuilt for NODE.  */
4692 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4693 unsigned int count
, i
;
/* First sub-record: number of aggregate replacement entries.  */
4695 count
= streamer_read_uhwi (ib
);
4696 for (i
= 0; i
<count
; i
++)
4698 struct ipa_agg_replacement_value
*av
;
4699 struct bitpack_d bp
;
/* Each entry is GC-allocated: offset, parameter index, the constant
   tree value, and a one-bit by_ref flag packed in a bitpack.  */
4701 av
= ggc_alloc
<ipa_agg_replacement_value
> ();
4702 av
->offset
= streamer_read_uhwi (ib
);
4703 av
->index
= streamer_read_uhwi (ib
);
4704 av
->value
= stream_read_tree (ib
, data_in
);
4705 bp
= streamer_read_bitpack (ib
);
4706 av
->by_ref
= bp_unpack_value (&bp
, 1);
/* Attach the reconstructed chain to NODE's transformation summary.  */
4710 ipa_set_node_agg_value_chain (node
, aggvals
);
/* Second sub-record: per-parameter value ranges (count may be 0 when
   the writer had nothing to stream).  */
4712 count
= streamer_read_uhwi (ib
);
4715 ipcp_transformation_initialize ();
4716 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
4717 vec_safe_grow_cleared (ts
->m_vr
, count
);
4718 for (i
= 0; i
< count
; i
++)
4721 parm_vr
= &(*ts
->m_vr
)[i
];
4722 struct bitpack_d bp
;
4723 bp
= streamer_read_bitpack (ib
);
/* The "known" flag mirrors the bit packed by the writer; min/max and
   the range kind are only meaningful when it is set (the guard line is
   not visible in this view).  */
4724 parm_vr
->known
= bp_unpack_value (&bp
, 1);
4727 parm_vr
->type
= streamer_read_enum (ib
, value_range_kind
,
4729 parm_vr
->min
= streamer_read_wide_int (ib
);
4730 parm_vr
->max
= streamer_read_wide_int (ib
);
/* Third sub-record: per-parameter known-bits (mask/value) pairs.  */
4734 count
= streamer_read_uhwi (ib
);
4737 ipcp_transformation_initialize ();
4738 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
4739 vec_safe_grow_cleared (ts
->bits
, count
);
4741 for (i
= 0; i
< count
; i
++)
4743 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4744 bool known
= bp_unpack_value (&bp
, 1);
/* Look up (or intern) a shared ipa_bits for the streamed value/mask
   pair; evaluation order of the two reads is fixed by the call being
   split across two statements in the original (not fully visible).  */
4748 = ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib
),
4749 streamer_read_widest_int (ib
));
4750 (*ts
->bits
)[i
] = bits
;
4756 /* Write all aggregate replacement for nodes in set. */
/* Emit the IPA-CP transformation section for the current LTO partition:
   a count of function nodes with gimple bodies, followed by one
   write_ipcp_transformation_info record per such node.
   NOTE(review): extraction gaps — braces and the statement that
   increments COUNT in the first loop are not visible here.  */
4759 ipcp_write_transformation_summaries (void)
4761 struct cgraph_node
*node
;
4762 struct output_block
*ob
;
4763 unsigned int count
= 0;
4764 lto_symtab_encoder_iterator lsei
;
4765 lto_symtab_encoder_t encoder
;
4767 ob
= create_output_block (LTO_section_ipcp_transform
);
4768 encoder
= ob
->decl_state
->symtab_node_encoder
;
/* First pass: count the nodes that will get a record, so the reader
   knows how many to expect.  */
4770 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4771 lsei_next_function_in_partition (&lsei
))
4773 node
= lsei_cgraph_node (lsei
);
4774 if (node
->has_gimple_body_p ())
4778 streamer_write_uhwi (ob
, count
);
/* Second pass: emit the per-node transformation data, using the same
   iteration and the same has_gimple_body_p filter as the counting pass
   so the two stay in sync.  */
4780 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4781 lsei_next_function_in_partition (&lsei
))
4783 node
= lsei_cgraph_node (lsei
);
4784 if (node
->has_gimple_body_p ())
4785 write_ipcp_transformation_info (ob
, node
);
/* Terminate the stream and flush the section to the object file.  */
4787 streamer_write_char_stream (ob
->main_stream
, 0);
4788 produce_asm (ob
, NULL
);
4789 destroy_output_block (ob
);
4792 /* Read replacements section in file FILE_DATA of length LEN with data
/* Parse one LTO_section_ipcp_transform section: decode the section
   header, set up an input block over the main stream, then read the
   per-node transformation records.
   NOTE(review): extraction gaps — part of the signature (DATA/LEN
   parameters), local declarations of COUNT/I/INDEX and closing braces
   are missing from this view.  */
4796 read_replacements_section (struct lto_file_decl_data
*file_data
,
/* The section starts with a standard lto_function_header; the main and
   string streams are located at fixed offsets computed from it.  */
4800 const struct lto_function_header
*header
=
4801 (const struct lto_function_header
*) data
;
4802 const int cfg_offset
= sizeof (struct lto_function_header
);
4803 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4804 const int string_offset
= main_offset
+ header
->main_size
;
4805 class data_in
*data_in
;
4809 lto_input_block
ib_main ((const char *) data
+ main_offset
,
4810 header
->main_size
, file_data
->mode_table
);
4812 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4813 header
->string_size
, vNULL
);
/* Number of per-node records written by
   ipcp_write_transformation_summaries.  */
4814 count
= streamer_read_uhwi (&ib_main
);
4816 for (i
= 0; i
< count
; i
++)
4819 struct cgraph_node
*node
;
4820 lto_symtab_encoder_t encoder
;
/* Each record is prefixed with the symtab encoder index of its node;
   dereference it back to the cgraph_node.  */
4822 index
= streamer_read_uhwi (&ib_main
);
4823 encoder
= file_data
->symtab_node_encoder
;
4824 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
4826 gcc_assert (node
->definition
);
4827 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
/* NOTE(review): the section was obtained as LTO_section_ipcp_transform
   but is freed under the LTO_section_jump_functions tag — verify this
   mismatch is intentional (the tag may be unused by the free path).  */
4829 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4831 lto_data_in_delete (data_in
);
4834 /* Read IPA-CP aggregate replacements. */
/* Walk every LTO input file and, where a non-empty
   LTO_section_ipcp_transform section exists, hand it to
   read_replacements_section.
   NOTE(review): extraction gaps — the declarations of J/LEN, the NULL
   check on DATA and the closing braces are not visible here.  */
4837 ipcp_read_transformation_summaries (void)
4839 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4840 struct lto_file_decl_data
*file_data
;
/* file_data_vec is a NULL-terminated array; iterate until the
   terminator.  */
4843 while ((file_data
= file_data_vec
[j
++]))
4846 const char *data
= lto_get_section_data (file_data
,
4847 LTO_section_ipcp_transform
,
4850 read_replacements_section (file_data
, data
, len
);
4854 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
/* Remap the parameter indices stored in the AGGVAL chain so they refer
   to positions in NODE's clone, which may have dropped some of the
   original parameters (clone.combined_args_to_skip).
   NOTE(review): extraction gaps — the early return, the computation of
   C (maximum index + 1) and the skipped/kept-index bookkeeping inside
   the second loop are not visible in this view.  */
4858 adjust_agg_replacement_values (struct cgraph_node
*node
,
4859 struct ipa_agg_replacement_value
*aggval
)
4861 struct ipa_agg_replacement_value
*v
;
4862 int i
, c
= 0, d
= 0, *adj
;
/* Nothing to do when the clone kept all parameters.  */
4864 if (!node
->clone
.combined_args_to_skip
)
/* First pass over the chain: indices must be non-negative (C is derived
   from them on lines not visible here).  */
4867 for (v
= aggval
; v
; v
= v
->next
)
4869 gcc_assert (v
->index
>= 0);
/* Build an old-index -> new-index translation table on the stack.  */
4875 adj
= XALLOCAVEC (int, c
);
4876 for (i
= 0; i
< c
; i
++)
4877 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
/* Second pass: rewrite every entry's index through the table.  */
4885 for (v
= aggval
; v
; v
= v
->next
)
4886 v
->index
= adj
[v
->index
];
4889 /* Dominator walker driving the ipcp modification phase. */
/* Walks the dominator tree of a function and, in before_dom_children,
   replaces aggregate loads from parameters with the constants recorded
   in the aggregate-replacement chain.  SC / CC are out-parameters the
   walker sets when it changes something / changes the CFG.
   NOTE(review): extraction gaps — the class-body brace, access
   specifiers and the trailing constructor parameters (bool *sc,
   bool *cc) are not visible in this view.  */
4891 class ipcp_modif_dom_walker
: public dom_walker
4894 ipcp_modif_dom_walker (struct ipa_func_body_info
*fbi
,
4895 vec
<ipa_param_descriptor
, va_gc
> *descs
,
4896 struct ipa_agg_replacement_value
*av
,
4898 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
4899 m_aggval (av
), m_something_changed (sc
), m_cfg_changed (cc
) {}
4901 virtual edge
before_dom_children (basic_block
);
/* Function-body analysis context used by ipa_load_from_parm_agg.  */
4904 struct ipa_func_body_info
*m_fbi
;
/* Parameter descriptors of the function being modified.  */
4905 vec
<ipa_param_descriptor
, va_gc
> *m_descriptors
;
/* Chain of known aggregate constant values for this node.  */
4906 struct ipa_agg_replacement_value
*m_aggval
;
/* Flags reported back to the caller of the walk.  */
4907 bool *m_something_changed
, *m_cfg_changed
;
/* For every statement in BB that loads from a parameter-backed
   aggregate at a known offset, substitute the constant recorded in
   m_aggval, folding it to the load's type when necessary.
   NOTE(review): extraction gaps — several `continue` guards, local
   declarations (rhs, t, val, index, size, by_ref) and closing braces
   are missing from this view; comments describe only visible logic.  */
4911 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
4913 gimple_stmt_iterator gsi
;
4914 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4916 struct ipa_agg_replacement_value
*v
;
4917 gimple
*stmt
= gsi_stmt (gsi
);
4919 HOST_WIDE_INT offset
;
/* Only plain register-typed loads are candidates.  */
4924 if (!gimple_assign_load_p (stmt
))
4926 rhs
= gimple_assign_rhs1 (stmt
);
4927 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
/* Strip handled components, but give up on VIEW_CONVERT_EXPR.  */
4932 while (handled_component_p (t
))
4934 /* V_C_E can do things like convert an array of integers to one
4935 bigger integer and similar things we do not handle below. */
4936 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4941 t
= TREE_OPERAND (t
, 0);
/* Identify which parameter, and at what offset/size, the load reads.  */
4946 if (!ipa_load_from_parm_agg (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
4947 &offset
, &size
, &by_ref
))
/* Find a recorded replacement matching this parameter and offset.  */
4949 for (v
= m_aggval
; v
; v
= v
->next
)
4950 if (v
->index
== index
4951 && v
->offset
== offset
)
/* The replacement must agree on by-ref-ness and size with the load.  */
4954 || v
->by_ref
!= by_ref
4955 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
->value
))),
4959 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
/* Convert the constant to the load's type: a NOP cast when foldable,
   a VIEW_CONVERT when only the sizes match, otherwise report and
   skip (dump output below).  */
4960 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4962 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4963 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4964 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4965 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4966 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4971 fprintf (dump_file
, " const ");
4972 print_generic_expr (dump_file
, v
->value
);
4973 fprintf (dump_file
, " can't be converted to type of ");
4974 print_generic_expr (dump_file
, rhs
);
4975 fprintf (dump_file
, "\n");
4983 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4985 fprintf (dump_file
, "Modifying stmt:\n ");
4986 print_gimple_stmt (dump_file
, stmt
, 0);
/* Perform the actual substitution of the folded constant.  */
4988 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4991 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4993 fprintf (dump_file
, "into:\n ");
4994 print_gimple_stmt (dump_file
, stmt
, 0);
4995 fprintf (dump_file
, "\n");
/* Record that something changed; purging dead EH edges may also
   change the CFG, which the caller must know about.  */
4998 *m_something_changed
= true;
4999 if (maybe_clean_eh_stmt (stmt
)
5000 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
5001 *m_cfg_changed
= true;
5006 /* Update bits info of formal parameters as described in
5007 ipcp_transformation. */
/* For each formal parameter with recorded known-bits information,
   attach that information to the parameter's default-def SSA name:
   nonzero-bits for integral parameters, alignment/misalignment for
   pointer parameters.
   NOTE(review): extraction gaps — early returns, `continue`
   statements, the guard on bits[i] and several closing braces are
   missing from this view.  */
5010 ipcp_update_bits (struct cgraph_node
*node
)
5012 tree parm
= DECL_ARGUMENTS (node
->decl
);
5013 tree next_parm
= parm
;
5014 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
/* Nothing recorded for this node.  */
5016 if (!ts
|| vec_safe_length (ts
->bits
) == 0)
5019 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5020 unsigned count
= bits
.length ();
/* Walk the bits vector and the DECL_ARGUMENTS chain in parallel;
   next_parm is advanced only for parameters the clone kept.  */
5022 for (unsigned i
= 0; i
< count
; ++i
, parm
= next_parm
)
5024 if (node
->clone
.combined_args_to_skip
5025 && bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
5028 gcc_checking_assert (parm
);
5029 next_parm
= DECL_CHAIN (parm
);
/* Only integral or pointer parameters that live in SSA form can carry
   this information.  */
5032 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm
))
5033 || POINTER_TYPE_P (TREE_TYPE (parm
)))
5034 || !is_gimple_reg (parm
))
5037 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5043 fprintf (dump_file
, "Adjusting mask for param %u to ", i
);
5044 print_hex (bits
[i
]->mask
, dump_file
);
5045 fprintf (dump_file
, "\n");
/* Integral parameter: combine mask and value into nonzero-bits info on
   the default def.  */
5048 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5050 unsigned prec
= TYPE_PRECISION (TREE_TYPE (ddef
));
5051 signop sgn
= TYPE_SIGN (TREE_TYPE (ddef
));
5053 wide_int nonzero_bits
= wide_int::from (bits
[i
]->mask
, prec
, UNSIGNED
)
5054 | wide_int::from (bits
[i
]->value
, prec
, sgn
);
5055 set_nonzero_bits (ddef
, nonzero_bits
);
/* Pointer parameter: derive alignment from the lowest set mask bit and
   misalignment from the value bits below it.  */
5059 unsigned tem
= bits
[i
]->mask
.to_uhwi ();
5060 unsigned HOST_WIDE_INT bitpos
= bits
[i
]->value
.to_uhwi ();
5061 unsigned align
= tem
& -tem
;
5062 unsigned misalign
= bitpos
& (align
- 1);
5067 fprintf (dump_file
, "Adjusting align: %u, misalign: %u\n", align
, misalign
);
/* Do not weaken alignment info that is already stronger; only report
   mismatches in dump output.  */
5069 unsigned old_align
, old_misalign
;
5070 struct ptr_info_def
*pi
= get_ptr_info (ddef
);
5071 bool old_known
= get_ptr_info_alignment (pi
, &old_align
, &old_misalign
);
5074 && old_align
> align
)
5078 fprintf (dump_file
, "But alignment was already %u.\n", old_align
);
5079 if ((old_misalign
& (align
- 1)) != misalign
)
5080 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5081 old_misalign
, misalign
);
5087 && ((misalign
& (old_align
- 1)) != old_misalign
)
5089 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5090 old_misalign
, misalign
);
5092 set_ptr_info_alignment (pi
, align
, misalign
);
5098 /* Update value range of formal parameters as described in
5099 ipcp_transformation. */
/* For each formal parameter with a recorded value range, attach range
   information to the parameter's default-def SSA name: set_range_info
   for integral parameters, or a non-null note for pointer parameters
   whose range excludes exactly {0}.
   NOTE(review): extraction gaps — early returns, `continue`
   statements, part of the guard around the "known" check and closing
   braces are missing from this view.  */
5102 ipcp_update_vr (struct cgraph_node
*node
)
5104 tree fndecl
= node
->decl
;
5105 tree parm
= DECL_ARGUMENTS (fndecl
);
5106 tree next_parm
= parm
;
5107 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
/* Nothing recorded for this node.  */
5108 if (!ts
|| vec_safe_length (ts
->m_vr
) == 0)
5110 const vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5111 unsigned count
= vr
.length ();
/* Walk the vr vector and DECL_ARGUMENTS in parallel, skipping
   parameters removed in the clone.  */
5113 for (unsigned i
= 0; i
< count
; ++i
, parm
= next_parm
)
5115 if (node
->clone
.combined_args_to_skip
5116 && bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
5118 gcc_checking_assert (parm
);
5119 next_parm
= DECL_CHAIN (parm
);
5120 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5122 if (!ddef
|| !is_gimple_reg (parm
))
/* Only (anti-)ranges are usable here.  */
5126 && (vr
[i
].type
== VR_RANGE
|| vr
[i
].type
== VR_ANTI_RANGE
))
5128 tree type
= TREE_TYPE (ddef
);
5129 unsigned prec
= TYPE_PRECISION (type
);
/* Integral parameter: install the range on the default def, widening
   the streamed bounds to the parameter's precision.  */
5130 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5134 fprintf (dump_file
, "Setting value range of param %u ", i
);
5135 fprintf (dump_file
, "%s[",
5136 (vr
[i
].type
== VR_ANTI_RANGE
) ? "~" : "");
5137 print_decs (vr
[i
].min
, dump_file
);
5138 fprintf (dump_file
, ", ");
5139 print_decs (vr
[i
].max
, dump_file
);
5140 fprintf (dump_file
, "]\n");
5142 set_range_info (ddef
, vr
[i
].type
,
5143 wide_int_storage::from (vr
[i
].min
, prec
,
5145 wide_int_storage::from (vr
[i
].max
, prec
,
/* Pointer parameter: ~[0, 0] means the pointer is known non-null.  */
5148 else if (POINTER_TYPE_P (TREE_TYPE (ddef
))
5149 && vr
[i
].type
== VR_ANTI_RANGE
5150 && wi::eq_p (vr
[i
].min
, 0)
5151 && wi::eq_p (vr
[i
].max
, 0))
5154 fprintf (dump_file
, "Setting nonnull for %u\n", i
);
5155 set_ptr_nonnull (ddef
);
5161 /* IPCP transformation phase doing propagation of aggregate values. */
/* Entry point of the IPA-CP transformation phase for NODE (run with
   NODE's body set up as cfun): applies recorded bits, value-range and
   aggregate-value information, rewrites matching loads via
   ipcp_modif_dom_walker, then cleans up.
   NOTE(review): extraction gaps — early returns (e.g. when AGGVAL is
   NULL or PARAM_COUNT is 0), declarations of param_count/i, parts of
   the fbi initialization and several braces are missing from this
   view.  */
5164 ipcp_transform_function (struct cgraph_node
*node
)
5166 vec
<ipa_param_descriptor
, va_gc
> *descriptors
= NULL
;
5167 struct ipa_func_body_info fbi
;
5168 struct ipa_agg_replacement_value
*aggval
;
5170 bool cfg_changed
= false, something_changed
= false;
5172 gcc_checking_assert (cfun
);
5173 gcc_checking_assert (current_function_decl
);
5176 fprintf (dump_file
, "Modification phase of node %s\n",
5177 node
->dump_name ());
/* First install the SSA-level information that needs no statement
   rewriting.  */
5179 ipcp_update_bits (node
);
5180 ipcp_update_vr (node
);
5181 aggval
= ipa_get_agg_replacements_for_node (node
);
5184 param_count
= count_formal_params (node
->decl
);
5185 if (param_count
== 0)
/* Remap aggregate-replacement indices for parameters the clone
   dropped.  */
5187 adjust_agg_replacement_values (node
, aggval
);
5189 ipa_dump_agg_replacement_values (dump_file
, aggval
);
/* Set up the function-body analysis context used by the walker.  */
5193 fbi
.bb_infos
= vNULL
;
5194 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
));
5195 fbi
.param_count
= param_count
;
5196 fbi
.aa_walk_budget
= PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS
);
5198 vec_safe_grow_cleared (descriptors
, param_count
);
5199 ipa_populate_param_decls (node
, *descriptors
);
5200 calculate_dominance_info (CDI_DOMINATORS
);
/* Walk the dominator tree, substituting recorded aggregate constants
   into loads; the two bool flags report what changed.  */
5201 ipcp_modif_dom_walker (&fbi
, descriptors
, aggval
, &something_changed
,
5202 &cfg_changed
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
/* Release per-BB info and dominance data built above.  */
5205 struct ipa_bb_info
*bi
;
5206 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
5207 free_ipa_bb_info (bi
);
5208 fbi
.bb_infos
.release ();
5209 free_dominance_info (CDI_DOMINATORS
);
/* The aggregate values have been consumed; drop them from the
   summary so they are not applied twice.  */
5211 ipcp_transformation
*s
= ipcp_transformation_sum
->get (node
);
5212 s
->agg_values
= NULL
;
5216 vec_free (descriptors
);
5218 if (!something_changed
)
/* Substitutions may have made blocks unreachable; fix the CFG and the
   call graph accordingly.  */
5222 delete_unreachable_blocks_update_callgraph (node
, false);
5224 return TODO_update_ssa_only_virtuals
;
5227 #include "gt-ipa-prop.h"