/* Interprocedural analyses.
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information. */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits. */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table used to avoid repeated allocations of equal ipa_bits. */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
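
/* Illustrative sketch (not part of the original file): with the traits above,
   equal ipa_bits instances are shared by a lookup-or-insert into
   IPA_BITS_HASH_TABLE.  The real helper, ipa_get_ipa_bits_for_value, appears
   further down in this file; assuming the table has already been created,
   the pattern is roughly:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (*slot)
       return *slot;                     // reuse an already existing ipa_bits
     ipa_bits *res = ggc_alloc<ipa_bits> ();
     res->value = value;
     res->mask = mask;
     *slot = res;                        // cache the newly allocated one
     return res;
*/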
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL. */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }
  static bool
  equal (const value_range *a, const value_range *b)
    {
      return (a->equal_p (*b)
	      && types_compatible_p (a->type (), b->type ()));
    }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};

/* Hash table used to avoid repeated allocations of equal value_ranges. */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
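
/* Illustrative sketch (not part of the original file): the three helpers
   above are typically used together when IPA summaries are built for a node.
   Assuming NODE is a cgraph_node with a gimple body, the flow is roughly:

     ipa_initialize_node_params (node);
       -> count_formal_params (node->decl)       // how many PARM_DECLs exist
       -> ipa_alloc_node_params (node, count)    // grow the descriptor vector
       -> ipa_populate_param_decls (node, ...)   // fill decl_or_type/move_cost

   After this, ipa_get_param_decl_index can map a PARM_DECL back to its
   position in the descriptor vector.  */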
304 /* Print the jump functions associated with call graph edge CS to file F. */
307 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
311 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
312 for (i
= 0; i
< count
; i
++)
314 struct ipa_jump_func
*jump_func
;
315 enum jump_func_type type
;
317 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
318 type
= jump_func
->type
;
320 fprintf (f
, " param %d: ", i
);
321 if (type
== IPA_JF_UNKNOWN
)
322 fprintf (f
, "UNKNOWN\n");
323 else if (type
== IPA_JF_CONST
)
325 tree val
= jump_func
->value
.constant
.value
;
326 fprintf (f
, "CONST: ");
327 print_generic_expr (f
, val
);
328 if (TREE_CODE (val
) == ADDR_EXPR
329 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
332 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
336 else if (type
== IPA_JF_PASS_THROUGH
)
338 fprintf (f
, "PASS THROUGH: ");
339 fprintf (f
, "%d, op %s",
340 jump_func
->value
.pass_through
.formal_id
,
341 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
342 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
345 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
347 if (jump_func
->value
.pass_through
.agg_preserved
)
348 fprintf (f
, ", agg_preserved");
351 else if (type
== IPA_JF_ANCESTOR
)
353 fprintf (f
, "ANCESTOR: ");
354 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
355 jump_func
->value
.ancestor
.formal_id
,
356 jump_func
->value
.ancestor
.offset
);
357 if (jump_func
->value
.ancestor
.agg_preserved
)
358 fprintf (f
, ", agg_preserved");
362 if (jump_func
->agg
.items
)
364 struct ipa_agg_jf_item
*item
;
367 fprintf (f
, " Aggregate passed by %s:\n",
368 jump_func
->agg
.by_ref
? "reference" : "value");
369 FOR_EACH_VEC_ELT (*jump_func
->agg
.items
, j
, item
)
371 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
373 fprintf (f
, "type: ");
374 print_generic_expr (f
, item
->type
);
376 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
377 fprintf (f
, "PASS THROUGH: %d,",
378 item
->value
.pass_through
.formal_id
);
379 else if (item
->jftype
== IPA_JF_LOAD_AGG
)
381 fprintf (f
, "LOAD AGG: %d",
382 item
->value
.pass_through
.formal_id
);
383 fprintf (f
, " [offset: " HOST_WIDE_INT_PRINT_DEC
", by %s],",
384 item
->value
.load_agg
.offset
,
385 item
->value
.load_agg
.by_ref
? "reference"
389 if (item
->jftype
== IPA_JF_PASS_THROUGH
390 || item
->jftype
== IPA_JF_LOAD_AGG
)
392 fprintf (f
, " op %s",
393 get_tree_code_name (item
->value
.pass_through
.operation
));
394 if (item
->value
.pass_through
.operation
!= NOP_EXPR
)
397 print_generic_expr (f
, item
->value
.pass_through
.operand
);
400 else if (item
->jftype
== IPA_JF_CONST
)
402 fprintf (f
, "CONST: ");
403 print_generic_expr (f
, item
->value
.constant
);
405 else if (item
->jftype
== IPA_JF_UNKNOWN
)
406 fprintf (f
, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC
" bits",
407 tree_to_uhwi (TYPE_SIZE (item
->type
)));
412 class ipa_polymorphic_call_context
*ctx
413 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs
), i
);
414 if (ctx
&& !ctx
->useless_p ())
416 fprintf (f
, " Context: ");
417 ctx
->dump (dump_file
);
422 fprintf (f
, " value: ");
423 print_hex (jump_func
->bits
->value
, f
);
424 fprintf (f
, ", mask: ");
425 print_hex (jump_func
->bits
->mask
, f
);
429 fprintf (f
, " Unknown bits\n");
435 (jump_func
->m_vr
->kind () == VR_ANTI_RANGE
) ? "~" : "");
436 print_decs (wi::to_wide (jump_func
->m_vr
->min ()), f
);
438 print_decs (wi::to_wide (jump_func
->m_vr
->max ()), f
);
442 fprintf (f
, " Unknown VR\n");
447 /* Print the jump functions of all arguments on all call graph edges going from
451 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
453 struct cgraph_edge
*cs
;
455 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
456 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
459 fprintf (f
, " callsite %s -> %s : \n",
461 cs
->callee
->dump_name ());
462 if (!ipa_edge_args_info_available_for_edge_p (cs
))
463 fprintf (f
, " no arg info\n");
465 ipa_print_node_jump_functions_for_edge (f
, cs
);
468 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
470 class cgraph_indirect_call_info
*ii
;
472 ii
= cs
->indirect_info
;
473 if (ii
->agg_contents
)
474 fprintf (f
, " indirect %s callsite, calling param %i, "
475 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
476 ii
->member_ptr
? "member ptr" : "aggregate",
477 ii
->param_index
, ii
->offset
,
478 ii
->by_ref
? "by reference" : "by_value");
480 fprintf (f
, " indirect %s callsite, calling param %i, "
481 "offset " HOST_WIDE_INT_PRINT_DEC
,
482 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
487 fprintf (f
, ", for stmt ");
488 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
493 ii
->context
.dump (f
);
494 if (!ipa_edge_args_info_available_for_edge_p (cs
))
495 fprintf (f
, " no arg info\n");
497 ipa_print_node_jump_functions_for_edge (f
, cs
);
501 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
504 ipa_print_all_jump_functions (FILE *f
)
506 struct cgraph_node
*node
;
508 fprintf (f
, "\nJump functions:\n");
509 FOR_EACH_FUNCTION (node
)
511 ipa_print_node_jump_functions (f
, node
);
/* Set JFUNC to be a jump function known to carry no useful information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
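
/* Illustrative example (not part of the original file): for a caller such as

     void caller (int a, struct S *p)
     {
       callee (a, &p->inner);
     }

   the jump function for the first actual argument would typically be set up
   with ipa_set_jf_simple_pass_through (jfunc, 0, false), because the caller's
   parameter 0 is forwarded unchanged, while the second one would be described
   by ipa_set_ancestor_jf (jfunc, <bit offset of inner>, 1, ...), i.e.
   "parameter 1 plus a constant offset".  The actual choice is made later in
   this file by compute_complex_assign_jump_func and friends; this is only a
   sketch of the encoding.  */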
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
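
/* Illustrative example (not part of the original file) of the constructor
   shape assumed by the comment above, for a hypothetical hierarchy:

     struct A { virtual ~A (); };
     struct B : A { B (); virtual ~B (); int x; };

     B::B ()
     {
       // 1) ancestor sub-object constructors run first.
       A::A (this);
       // 2) then the VMT pointers are rewritten to B's tables.
       this->_vptr.A = &_ZTV1B + 16;
       // 3) only afterwards does user code (which may call virtual
       //    functions) run.
       this->x = 0;
     }

   The store in step 2) is what stmt_may_be_vtbl_ptr_store is meant to catch
   when the function body is walked backwards.  */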
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also, as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
773 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
774 callsite CALL) by looking for assignments to its virtual table pointer. If
775 it is, return true. ARG is the object itself (not a pointer
776 to it, unless dereferenced). BASE is the base of the memory access as
777 returned by get_ref_base_and_extent, as is the offset.
779 This is helper function for detect_type_change and detect_type_change_ssa
780 that does the heavy work which is usually unnecesary. */
783 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
784 tree base
, tree comp_type
, gcall
*call
,
785 HOST_WIDE_INT offset
)
787 struct prop_type_change_info tci
;
790 gcc_checking_assert (DECL_P (arg
)
791 || TREE_CODE (arg
) == MEM_REF
792 || handled_component_p (arg
));
794 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
796 /* Const calls cannot call virtual methods through VMT and so type changes do
798 if (!flag_devirtualize
|| !gimple_vuse (call
)
799 /* Be sure expected_type is polymorphic. */
801 || TREE_CODE (comp_type
) != RECORD_TYPE
802 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
803 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
806 ao_ref_init (&ao
, arg
);
809 ao
.size
= POINTER_SIZE
;
810 ao
.max_size
= ao
.size
;
813 tci
.object
= get_base_address (arg
);
814 tci
.type_maybe_changed
= false;
817 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
818 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
+ 1);
820 if (walked
>= 0 && !tci
.type_maybe_changed
)
826 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
827 If it is, return true. ARG is the object itself (not a pointer
828 to it, unless dereferenced). BASE is the base of the memory access as
829 returned by get_ref_base_and_extent, as is the offset. */
832 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
833 tree comp_type
, gcall
*call
,
834 HOST_WIDE_INT offset
)
836 if (!flag_devirtualize
)
839 if (TREE_CODE (base
) == MEM_REF
840 && !param_type_may_change_p (current_function_decl
,
841 TREE_OPERAND (base
, 0),
844 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
848 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
849 SSA name (its dereference will become the base and the offset is assumed to
853 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
856 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
857 if (!flag_devirtualize
858 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
861 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
864 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
865 build_int_cst (ptr_type_node
, 0));
867 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
871 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
872 boolean variable pointed to by DATA. */
875 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
878 bool *b
= (bool *) data
;
883 /* Find the nearest valid aa status for parameter specified by INDEX that
886 static struct ipa_param_aa_status
*
887 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
892 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
895 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
896 if (!bi
->param_aa_statuses
.is_empty ()
897 && bi
->param_aa_statuses
[index
].valid
)
898 return &bi
->param_aa_statuses
[index
];
902 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
903 structures and/or intialize the result with a dominating description as
906 static struct ipa_param_aa_status
*
907 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
910 gcc_checking_assert (fbi
);
911 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
912 if (bi
->param_aa_statuses
.is_empty ())
913 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
, true);
914 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
917 gcc_checking_assert (!paa
->parm_modified
918 && !paa
->ref_modified
919 && !paa
->pt_modified
);
920 struct ipa_param_aa_status
*dom_paa
;
921 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
931 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
932 a value known not to be modified in this function before reaching the
933 statement STMT. FBI holds information about the function we have so far
934 gathered but do not survive the summary building stage. */
937 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
938 gimple
*stmt
, tree parm_load
)
940 struct ipa_param_aa_status
*paa
;
941 bool modified
= false;
944 tree base
= get_base_address (parm_load
);
945 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
946 if (TREE_READONLY (base
))
949 gcc_checking_assert (fbi
);
950 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
951 if (paa
->parm_modified
)
954 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
955 ao_ref_init (&refd
, parm_load
);
956 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
957 &modified
, NULL
, NULL
,
958 fbi
->aa_walk_budget
+ 1);
963 fbi
->aa_walk_budget
= 0;
966 fbi
->aa_walk_budget
-= walked
;
968 paa
->parm_modified
= true;
972 /* If STMT is an assignment that loads a value from an parameter declaration,
973 return the index of the parameter in ipa_node_params which has not been
974 modified. Otherwise return -1. */
977 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
978 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
984 if (!gimple_assign_single_p (stmt
))
987 op1
= gimple_assign_rhs1 (stmt
);
988 if (TREE_CODE (op1
) != PARM_DECL
)
991 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
993 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
999 /* Return true if memory reference REF (which must be a load through parameter
1000 with INDEX) loads data that are known to be unmodified in this function
1001 before reaching statement STMT. */
1004 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
1005 int index
, gimple
*stmt
, tree ref
)
1007 struct ipa_param_aa_status
*paa
;
1008 bool modified
= false;
1011 gcc_checking_assert (fbi
);
1012 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1013 if (paa
->ref_modified
)
1016 gcc_checking_assert (gimple_vuse (stmt
));
1017 ao_ref_init (&refd
, ref
);
1018 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1019 &modified
, NULL
, NULL
,
1020 fbi
->aa_walk_budget
+ 1);
1024 fbi
->aa_walk_budget
= 0;
1027 fbi
->aa_walk_budget
-= walked
;
1029 paa
->ref_modified
= true;
1033 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1034 is known to be unmodified in this function before reaching call statement
1035 CALL into which it is passed. FBI describes the function body. */
1038 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1039 gimple
*call
, tree parm
)
1041 bool modified
= false;
1044 /* It's unnecessary to calculate anything about memory contnets for a const
1045 function because it is not goin to use it. But do not cache the result
1046 either. Also, no such calculations for non-pointers. */
1047 if (!gimple_vuse (call
)
1048 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1051 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1054 if (paa
->pt_modified
)
1057 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1058 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1059 &modified
, NULL
, NULL
,
1060 fbi
->aa_walk_budget
+ 1);
1063 fbi
->aa_walk_budget
= 0;
1067 fbi
->aa_walk_budget
-= walked
;
1069 paa
->pt_modified
= true;
1073 /* Return true if we can prove that OP is a memory reference loading
1074 data from an aggregate passed as a parameter.
1076 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1077 false if it cannot prove that the value has not been modified before the
1078 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1079 if it cannot prove the value has not been modified, in that case it will
1080 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1082 INFO and PARMS_AINFO describe parameters of the current function (but the
1083 latter can be NULL), STMT is the load statement. If function returns true,
1084 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1085 within the aggregate and whether it is a load from a value passed by
1086 reference respectively. */
1089 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1090 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1091 gimple
*stmt
, tree op
, int *index_p
,
1092 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1093 bool *by_ref_p
, bool *guaranteed_unmodified
)
1098 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1105 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1107 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1113 if (guaranteed_unmodified
)
1114 *guaranteed_unmodified
= true;
1120 if (TREE_CODE (base
) != MEM_REF
1121 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1122 || !integer_zerop (TREE_OPERAND (base
, 1)))
1125 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1127 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1128 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1132 /* This branch catches situations where a pointer parameter is not a
1133 gimple register, for example:
1135 void hip7(S*) (struct S * p)
1137 void (*<T2e4>) (struct S *) D.1867;
1142 D.1867_2 = p.1_1->f;
1147 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1148 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1153 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1154 if (!data_preserved
&& !guaranteed_unmodified
)
1161 if (guaranteed_unmodified
)
1162 *guaranteed_unmodified
= data_preserved
;
1168 /* If STMT is an assignment that loads a value from a parameter declaration,
1169 or from an aggregate passed as the parameter either by value or reference,
1170 return the index of the parameter in ipa_node_params. Otherwise return -1.
1172 FBI holds gathered information about the function. INFO describes
1173 parameters of the function, STMT is the assignment statement. If it is a
1174 memory load from an aggregate, *OFFSET_P is filled with offset within the
1175 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1179 load_from_unmodified_param_or_agg (struct ipa_func_body_info
*fbi
,
1180 class ipa_node_params
*info
,
1182 HOST_WIDE_INT
*offset_p
,
1185 int index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1188 /* Load value from a parameter declaration. */
1195 if (!gimple_assign_load_p (stmt
))
1198 tree rhs
= gimple_assign_rhs1 (stmt
);
1200 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1201 for (tree t
= rhs
; handled_component_p (t
); t
= TREE_OPERAND (t
, 0))
1202 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
1205 /* Skip memory reference containing bit-field. */
1206 if (TREE_CODE (rhs
) == BIT_FIELD_REF
1207 || contains_bitfld_component_ref_p (rhs
))
1210 if (!ipa_load_from_parm_agg (fbi
, info
->descriptors
, stmt
, rhs
, &index
,
1211 offset_p
, &size
, by_ref_p
))
1214 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs
))),
1218 tree param_type
= ipa_get_type (info
, index
);
1220 if (!param_type
|| !AGGREGATE_TYPE_P (param_type
))
1223 else if (TREE_THIS_VOLATILE (rhs
))
1229 /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1230 to find original pointer. Initialize RET to the pointer which results from
1232 If offset is known return true and initialize OFFSET_RET. */
1235 unadjusted_ptr_and_unit_offset (tree op
, tree
*ret
, poly_int64
*offset_ret
)
1237 poly_int64 offset
= 0;
1238 bool offset_known
= true;
1241 for (i
= 0; i
< param_ipa_jump_function_lookups
; i
++)
1243 if (TREE_CODE (op
) == ADDR_EXPR
)
1245 poly_int64 extra_offset
= 0;
1246 tree base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
, 0),
1250 base
= get_base_address (TREE_OPERAND (op
, 0));
1251 if (TREE_CODE (base
) != MEM_REF
)
1253 offset_known
= false;
1257 if (TREE_CODE (base
) != MEM_REF
)
1259 offset
+= extra_offset
;
1261 op
= TREE_OPERAND (base
, 0);
1262 if (mem_ref_offset (base
).to_shwi (&extra_offset
))
1263 offset
+= extra_offset
;
1265 offset_known
= false;
1267 else if (TREE_CODE (op
) == SSA_NAME
1268 && !SSA_NAME_IS_DEFAULT_DEF (op
))
1270 gimple
*pstmt
= SSA_NAME_DEF_STMT (op
);
1272 if (gimple_assign_single_p (pstmt
))
1273 op
= gimple_assign_rhs1 (pstmt
);
1274 else if (is_gimple_assign (pstmt
)
1275 && gimple_assign_rhs_code (pstmt
) == POINTER_PLUS_EXPR
)
1277 poly_int64 extra_offset
= 0;
1278 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt
),
1280 offset
+= extra_offset
;
1282 offset_known
= false;
1283 op
= gimple_assign_rhs1 (pstmt
);
1292 *offset_ret
= offset
;
1293 return offset_known
;
1296 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1297 of an assignment statement STMT, try to determine whether we are actually
1298 handling any of the following cases and construct an appropriate jump
1299 function into JFUNC if so:
1301 1) The passed value is loaded from a formal parameter which is not a gimple
1302 register (most probably because it is addressable, the value has to be
1303 scalar) and we can guarantee the value has not changed. This case can
1304 therefore be described by a simple pass-through jump function. For example:
1313 2) The passed value can be described by a simple arithmetic pass-through
1320 D.2064_4 = a.1(D) + 4;
1323 This case can also occur in combination of the previous one, e.g.:
1331 D.2064_4 = a.0_3 + 4;
1334 3) The passed value is an address of an object within another one (which
1335 also passed by reference). Such situations are described by an ancestor
1336 jump function and describe situations such as:
1338 B::foo() (struct B * const this)
1342 D.1845_2 = &this_1(D)->D.1748;
1345 INFO is the structure describing individual parameters access different
1346 stages of IPA optimizations. PARMS_AINFO contains the information that is
1347 only needed for intraprocedural analysis. */
1350 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1351 class ipa_node_params
*info
,
1352 struct ipa_jump_func
*jfunc
,
1353 gcall
*call
, gimple
*stmt
, tree name
,
1356 HOST_WIDE_INT offset
, size
;
1357 tree op1
, tc_ssa
, base
, ssa
;
1361 op1
= gimple_assign_rhs1 (stmt
);
1363 if (TREE_CODE (op1
) == SSA_NAME
)
1365 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1366 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1368 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1369 SSA_NAME_DEF_STMT (op1
));
1374 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1375 tc_ssa
= gimple_assign_lhs (stmt
);
1380 switch (gimple_assign_rhs_class (stmt
))
1382 case GIMPLE_BINARY_RHS
:
1384 tree op2
= gimple_assign_rhs2 (stmt
);
1385 if (!is_gimple_ip_invariant (op2
)
1386 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1388 && !useless_type_conversion_p (TREE_TYPE (name
),
1392 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1393 gimple_assign_rhs_code (stmt
));
1396 case GIMPLE_SINGLE_RHS
:
1398 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1400 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1403 case GIMPLE_UNARY_RHS
:
1404 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1405 ipa_set_jf_unary_pass_through (jfunc
, index
,
1406 gimple_assign_rhs_code (stmt
));
1412 if (TREE_CODE (op1
) != ADDR_EXPR
)
1414 op1
= TREE_OPERAND (op1
, 0);
1415 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1417 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1418 offset_int mem_offset
;
1420 || TREE_CODE (base
) != MEM_REF
1421 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1423 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1424 ssa
= TREE_OPERAND (base
, 0);
1425 if (TREE_CODE (ssa
) != SSA_NAME
1426 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1430 /* Dynamic types are changed in constructors and destructors. */
1431 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1432 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1433 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1434 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
));
1437 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1440 iftmp.1_3 = &obj_2(D)->D.1762;
1442 The base of the MEM_REF must be a default definition SSA NAME of a
1443 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1444 whole MEM_REF expression is returned and the offset calculated from any
1445 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1446 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1449 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1452 tree expr
, parm
, obj
;
1455 if (!gimple_assign_single_p (assign
))
1457 expr
= gimple_assign_rhs1 (assign
);
1459 if (TREE_CODE (expr
) != ADDR_EXPR
)
1461 expr
= TREE_OPERAND (expr
, 0);
1463 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1465 offset_int mem_offset
;
1467 || TREE_CODE (expr
) != MEM_REF
1468 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1470 parm
= TREE_OPERAND (expr
, 0);
1471 if (TREE_CODE (parm
) != SSA_NAME
1472 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1473 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1476 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1482 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1483 statement PHI, try to find out whether NAME is in fact a
1484 multiple-inheritance typecast from a descendant into an ancestor of a formal
1485 parameter and thus can be described by an ancestor jump function and if so,
1486 write the appropriate function into JFUNC.
1488 Essentially we want to match the following pattern:
1496 iftmp.1_3 = &obj_2(D)->D.1762;
1499 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1500 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1504 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1505 class ipa_node_params
*info
,
1506 struct ipa_jump_func
*jfunc
,
1507 gcall
*call
, gphi
*phi
)
1509 HOST_WIDE_INT offset
;
1510 gimple
*assign
, *cond
;
1511 basic_block phi_bb
, assign_bb
, cond_bb
;
1512 tree tmp
, parm
, expr
, obj
;
1515 if (gimple_phi_num_args (phi
) != 2)
1518 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1519 tmp
= PHI_ARG_DEF (phi
, 0);
1520 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1521 tmp
= PHI_ARG_DEF (phi
, 1);
1524 if (TREE_CODE (tmp
) != SSA_NAME
1525 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1526 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1527 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1530 assign
= SSA_NAME_DEF_STMT (tmp
);
1531 assign_bb
= gimple_bb (assign
);
1532 if (!single_pred_p (assign_bb
))
1534 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1537 parm
= TREE_OPERAND (expr
, 0);
1538 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1542 cond_bb
= single_pred (assign_bb
);
1543 cond
= last_stmt (cond_bb
);
1545 || gimple_code (cond
) != GIMPLE_COND
1546 || gimple_cond_code (cond
) != NE_EXPR
1547 || gimple_cond_lhs (cond
) != parm
1548 || !integer_zerop (gimple_cond_rhs (cond
)))
1551 phi_bb
= gimple_bb (phi
);
1552 for (i
= 0; i
< 2; i
++)
1554 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1555 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1559 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1560 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
));
1563 /* Inspect the given TYPE and return true iff it has the same structure (the
1564 same number of fields of the same types) as a C++ member pointer. If
1565 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1566 corresponding fields there. */
1569 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1573 if (TREE_CODE (type
) != RECORD_TYPE
)
1576 fld
= TYPE_FIELDS (type
);
1577 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1578 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1579 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1585 fld
= DECL_CHAIN (fld
);
1586 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1587 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1592 if (DECL_CHAIN (fld
))
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
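
/* Illustrative example (not part of the original file): for a chain of SSA
   copies such as

     _1 = param_2(D);
     _3 = _1;
     use (_3);

   get_ssa_def_if_simple_copy (_3, &stmt) follows the defining statements and
   returns param_2(D), with *stmt left pointing at the last copy visited.  */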
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
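
/* Illustrative example (not part of the original file): the two helpers above
   keep the list sorted by ascending offset, so the clobber test only has to
   look at the first entry that does not end before the item.  E.g. with
   existing entries covering bit ranges [0,32) and [64,96), an item describing
   [32,64) is inserted between them and is not considered clobbered, while an
   item describing [48,80) is considered clobbered because the [64,96) entry
   starts inside it.  */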
1676 /* Build aggregate jump function from LIST, assuming there are exactly
1677 VALUE_COUNT entries there and that offset of the passed argument
1678 is ARG_OFFSET and store it into JFUNC. */
1681 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1682 int value_count
, HOST_WIDE_INT arg_offset
,
1683 struct ipa_jump_func
*jfunc
)
1685 vec_safe_reserve (jfunc
->agg
.items
, value_count
, true);
1686 for (; list
; list
= list
->next
)
1688 struct ipa_agg_jf_item item
;
1689 tree operand
= list
->value
.pass_through
.operand
;
1691 if (list
->value
.pass_through
.formal_id
>= 0)
1693 /* Content value is derived from some formal paramerter. */
1694 if (list
->value
.offset
>= 0)
1695 item
.jftype
= IPA_JF_LOAD_AGG
;
1697 item
.jftype
= IPA_JF_PASS_THROUGH
;
1699 item
.value
.load_agg
= list
->value
;
1701 item
.value
.pass_through
.operand
1702 = unshare_expr_without_location (operand
);
1706 /* Content value is known constant. */
1707 item
.jftype
= IPA_JF_CONST
;
1708 item
.value
.constant
= unshare_expr_without_location (operand
);
1713 item
.type
= list
->type
;
1714 gcc_assert (tree_to_shwi (TYPE_SIZE (list
->type
)) == list
->size
);
1716 item
.offset
= list
->offset
- arg_offset
;
1717 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1719 jfunc
->agg
.items
->quick_push (item
);
1723 /* Given an assignment statement STMT, try to collect information into
1724 AGG_VALUE that will be used to construct jump function for RHS of the
1725 assignment, from which content value of an aggregate part comes.
1727 Besides constant and simple pass-through jump functions, also try to
1728 identify whether it matches the following pattern that can be described by
1729 a load-value-from-aggregate jump function, which is a derivative of simple
1730 pass-through jump function.
1736 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1740 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1741 constant, simple pass-through and load-vale-from-aggregate. If value
1742 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1743 set to -1. For simple pass-through and load-value-from-aggregate, field
1744 FORMAL_ID specifies the related formal parameter index, and field
1745 OFFSET can be used to distinguish them, -1 means simple pass-through,
1746 otherwise means load-value-from-aggregate. */
1749 analyze_agg_content_value (struct ipa_func_body_info
*fbi
,
1750 struct ipa_load_agg_data
*agg_value
,
1753 tree lhs
= gimple_assign_lhs (stmt
);
1754 tree rhs1
= gimple_assign_rhs1 (stmt
);
1755 enum tree_code code
;
1758 /* Initialize jump function data for the aggregate part. */
1759 memset (agg_value
, 0, sizeof (*agg_value
));
1760 agg_value
->pass_through
.operation
= NOP_EXPR
;
1761 agg_value
->pass_through
.formal_id
= -1;
1762 agg_value
->offset
= -1;
1764 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
)) /* TODO: Support aggregate type. */
1765 || TREE_THIS_VOLATILE (lhs
)
1766 || TREE_CODE (lhs
) == BIT_FIELD_REF
1767 || contains_bitfld_component_ref_p (lhs
))
1770 /* Skip SSA copies. */
1771 while (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
1773 if (TREE_CODE (rhs1
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1776 stmt
= SSA_NAME_DEF_STMT (rhs1
);
1777 if (!is_gimple_assign (stmt
))
1780 rhs1
= gimple_assign_rhs1 (stmt
);
1783 if (gphi
*phi
= dyn_cast
<gphi
*> (stmt
))
1785 /* Also special case like the following (a is a formal parameter):
1787 _12 = *a_11(D).dim[0].stride;
1789 # iftmp.22_9 = PHI <_12(2), 1(3)>
1791 parm.6.dim[0].stride = iftmp.22_9;
1793 __x_MOD_foo (&parm.6, b_31(D));
1795 The aggregate function describing parm.6.dim[0].stride is encoded as a
1796 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1797 (the constant from the PHI node). */
1799 if (gimple_phi_num_args (phi
) != 2)
1801 tree arg0
= gimple_phi_arg_def (phi
, 0);
1802 tree arg1
= gimple_phi_arg_def (phi
, 1);
1805 if (is_gimple_ip_invariant (arg1
))
1810 else if (is_gimple_ip_invariant (arg0
))
1818 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1819 if (!is_gimple_assign (stmt
))
1823 agg_value
->pass_through
.operand
= operand
;
1825 else if (is_gimple_assign (stmt
))
1827 code
= gimple_assign_rhs_code (stmt
);
1828 switch (gimple_assign_rhs_class (stmt
))
1830 case GIMPLE_SINGLE_RHS
:
1831 if (is_gimple_ip_invariant (rhs1
))
1833 agg_value
->pass_through
.operand
= rhs1
;
1839 case GIMPLE_UNARY_RHS
:
1840 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1841 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1842 tcc_binary, this subtleness is somewhat misleading.
1844 Since tcc_unary is widely used in IPA-CP code to check an operation
1845 with one operand, here we only allow tc_unary operation to avoid
1846 possible problem. Then we can use (opclass == tc_unary) or not to
1847 distinguish unary and binary. */
1848 if (TREE_CODE_CLASS (code
) != tcc_unary
|| CONVERT_EXPR_CODE_P (code
))
1851 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1854 case GIMPLE_BINARY_RHS
:
1856 gimple
*rhs1_stmt
= stmt
;
1857 gimple
*rhs2_stmt
= stmt
;
1858 tree rhs2
= gimple_assign_rhs2 (stmt
);
1860 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &rhs1_stmt
);
1861 rhs2
= get_ssa_def_if_simple_copy (rhs2
, &rhs2_stmt
);
1863 if (is_gimple_ip_invariant (rhs2
))
1865 agg_value
->pass_through
.operand
= rhs2
;
1868 else if (is_gimple_ip_invariant (rhs1
))
1870 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1871 code
= swap_tree_comparison (code
);
1872 else if (!commutative_tree_code (code
))
1875 agg_value
->pass_through
.operand
= rhs1
;
1882 if (TREE_CODE_CLASS (code
) != tcc_comparison
1883 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1896 if (TREE_CODE (rhs1
) != SSA_NAME
)
1897 index
= load_from_unmodified_param_or_agg (fbi
, fbi
->info
, stmt
,
1899 &agg_value
->by_ref
);
1900 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1901 index
= ipa_get_param_decl_index (fbi
->info
, SSA_NAME_VAR (rhs1
));
1905 if (agg_value
->offset
>= 0)
1906 agg_value
->type
= TREE_TYPE (rhs1
);
1907 agg_value
->pass_through
.formal_id
= index
;
1908 agg_value
->pass_through
.operation
= code
;
1911 agg_value
->pass_through
.operand
= NULL_TREE
;
1914 /* If STMT is a memory store to the object whose address is BASE, extract
1915 information (offset, size, and value) into CONTENT, and return true,
1916 otherwise we conservatively assume the whole object is modified with
1917 unknown content, and return false. CHECK_REF means that access to object
1918 is expected to be in form of MEM_REF expression. */
1921 extract_mem_content (struct ipa_func_body_info
*fbi
,
1922 gimple
*stmt
, tree base
, bool check_ref
,
1923 struct ipa_known_agg_contents_list
*content
)
1925 HOST_WIDE_INT lhs_offset
, lhs_size
;
1928 if (!is_gimple_assign (stmt
))
1931 tree lhs
= gimple_assign_lhs (stmt
);
1932 tree lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
, &lhs_size
,
1939 if (TREE_CODE (lhs_base
) != MEM_REF
1940 || TREE_OPERAND (lhs_base
, 0) != base
1941 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1944 else if (lhs_base
!= base
)
1947 content
->offset
= lhs_offset
;
1948 content
->size
= lhs_size
;
1949 content
->type
= TREE_TYPE (lhs
);
1950 content
->next
= NULL
;
1952 analyze_agg_content_value (fbi
, &content
->value
, stmt
);
1956 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1957 in ARG is filled in constants or values that are derived from caller's
1958 formal parameter in the way described by some kinds of jump functions. FBI
1959 is the context of the caller function for interprocedural analysis. ARG can
1960 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1961 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1964 determine_known_aggregate_parts (struct ipa_func_body_info
*fbi
,
1965 gcall
*call
, tree arg
,
1967 struct ipa_jump_func
*jfunc
)
1969 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
1970 bitmap visited
= NULL
;
1971 int item_count
= 0, value_count
= 0;
1972 HOST_WIDE_INT arg_offset
, arg_size
;
1974 bool check_ref
, by_ref
;
1976 int max_agg_items
= opt_for_fn (fbi
->node
->decl
, param_ipa_max_agg_items
);
1978 if (max_agg_items
== 0)
1981 /* The function operates in three stages. First, we prepare check_ref, r,
1982 arg_base and arg_offset based on what is actually passed as an actual
1985 if (POINTER_TYPE_P (arg_type
))
1988 if (TREE_CODE (arg
) == SSA_NAME
)
1991 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
1992 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
1997 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1998 arg_size
= tree_to_uhwi (type_size
);
1999 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
2001 else if (TREE_CODE (arg
) == ADDR_EXPR
)
2005 arg
= TREE_OPERAND (arg
, 0);
2006 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2007 &arg_size
, &reverse
);
2010 if (DECL_P (arg_base
))
2013 ao_ref_init (&r
, arg_base
);
2025 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
2029 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2030 &arg_size
, &reverse
);
2034 ao_ref_init (&r
, arg
);
2037 /* Second stage traverses virtual SSA web backwards starting from the call
2038 statement, only looks at individual dominating virtual operand (its
2039 definition dominates the call), as long as it is confident that content
2040 of the aggregate is affected by definition of the virtual operand, it
2041 builds a sorted linked list of ipa_agg_jf_list describing that. */
2043 for (tree dom_vuse
= gimple_vuse (call
); dom_vuse
;)
2045 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
2047 if (gimple_code (stmt
) == GIMPLE_PHI
)
2049 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
2050 fbi
->aa_walk_budget
,
2051 &visited
, false, NULL
, NULL
);
2055 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
2057 struct ipa_known_agg_contents_list
*content
2058 = XALLOCA (struct ipa_known_agg_contents_list
);
2060 if (!extract_mem_content (fbi
, stmt
, arg_base
, check_ref
, content
))
2063 /* Now we get a dominating virtual operand, and need to check
2064 whether its value is clobbered any other dominating one. */
2065 if ((content
->value
.pass_through
.formal_id
>= 0
2066 || content
->value
.pass_through
.operand
)
2067 && !clobber_by_agg_contents_list_p (all_list
, content
))
2069 struct ipa_known_agg_contents_list
*copy
2070 = XALLOCA (struct ipa_known_agg_contents_list
);
2072 /* Add to the list consisting of only dominating virtual
2073 operands, whose definitions can finally reach the call. */
2074 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
2076 if (++value_count
== max_agg_items
)
2080 /* Add to the list consisting of all dominating virtual operands. */
2081 add_to_agg_contents_list (&all_list
, content
);
2083 if (++item_count
== 2 * max_agg_items
)
2086 dom_vuse
= gimple_vuse (stmt
);
2090 BITMAP_FREE (visited
);
2092 /* Third stage just goes over the list and creates an appropriate vector of
2093 ipa_agg_jf_item structures out of it, of course only if there are
2094 any meaningful items to begin with. */
2098 jfunc
->agg
.by_ref
= by_ref
;
2099 build_agg_jump_func_from_list (list
, value_count
, arg_offset
, jfunc
);
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}

/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = new (ggc_alloc<value_range> ()) value_range;
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
                  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
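
/* For illustration only (the callee name below is hypothetical): a caller
   such as

     void caller (void)
     {
       int buf[4];
       callee (&buf[0]);
     }

   passes an address that is trivially non-NULL, so the analysis further down
   can attach an anti-range to the corresponding jump function, roughly as in

     tree z = build_int_cst (TREE_TYPE (arg), 0);
     ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);

   which records that the argument is known to be different from zero.  */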
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          class ipa_polymorphic_call_context context (cs->caller->decl,
                                                      arg, cs->call_stmt,
                                                      &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
                                    &fbi->aa_walk_budget);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          bool addr_nonzero = false;
          bool strict_overflow = false;

          if (TREE_CODE (arg) == SSA_NAME
              && param_type
              && get_ptr_nonnull (arg))
            addr_nonzero = true;
          else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
            addr_nonzero = true;

          if (addr_nonzero)
            {
              tree z = build_int_cst (TREE_TYPE (arg), 0);
              ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
            }
          else
            gcc_assert (!jfunc->m_vr);
        }
      else
        {
          wide_int min, max;
          value_range_kind kind;
          if (TREE_CODE (arg) == SSA_NAME
              && param_type
              && (kind = get_range_info (arg, &min, &max))
              && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
            {
              value_range resvr;
              value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
                                 wide_int_to_tree (TREE_TYPE (arg), max),
                                 kind);
              range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
                                     &tmpvr, TREE_TYPE (arg));
              if (!resvr.undefined_p () && !resvr.varying_p ())
                ipa_set_jfunc_vr (jfunc, &resvr);
              else
                gcc_assert (!jfunc->m_vr);
            }
          else
            gcc_assert (!jfunc->m_vr);
        }

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
          && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
        {
          if (TREE_CODE (arg) == SSA_NAME)
            ipa_set_jfunc_bits (jfunc, 0,
                                widest_int::from (get_nonzero_bits (arg),
                                                  TYPE_SIGN (TREE_TYPE (arg))));
          else
            ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
        }
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned HOST_WIDE_INT bitpos;
          unsigned align;

          get_pointer_alignment_1 (arg, &align, &bitpos);
          widest_int mask = wi::bit_and_not
            (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
             align / BITS_PER_UNIT - 1);
          widest_int value = bitpos / BITS_PER_UNIT;
          ipa_set_jfunc_bits (jfunc, value, mask);
        }
      else
        gcc_assert (!jfunc->bits);
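
      /* A worked example of the encoding just above (the numbers are chosen
         purely for illustration): if get_pointer_alignment_1 reports
         align == 64 and bitpos == 32, i.e. the pointer is known to be 4 bytes
         past an 8-byte boundary, then with BITS_PER_UNIT == 8 the mask clears
         the low three bits (align / BITS_PER_UNIT - 1 == 7) and value becomes
         bitpos / BITS_PER_UNIT == 4, so the low three bits of the pointer are
         recorded as known to be 100 in binary.  */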
      if (is_gimple_ip_invariant (arg)
          || (VAR_P (arg)
              && is_global_var (arg)
              && TREE_READONLY (arg)))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple *stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is a pointer, we cannot use its type to determine the type of
         the aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get the type from the function declaration, but
         in case of K&R prototypes or variadic functions we can try our luck
         with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object, we
         may be able to work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee = callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto
              && !gimple_call_fnspec (cs->call_stmt).known_p ())
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
                     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
                                       param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call
            (IPA_NODE_REF (node), param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or

     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;

     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
2572 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2575 class ipa_node_params
*info
= fbi
->info
;
2576 HOST_WIDE_INT offset
;
2579 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2581 tree var
= SSA_NAME_VAR (target
);
2582 int index
= ipa_get_param_decl_index (info
, var
);
2584 ipa_note_param_call (fbi
->node
, index
, call
, false);
2589 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2590 bool guaranteed_unmodified
;
2591 if (gimple_assign_single_p (def
)
2592 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2593 gimple_assign_rhs1 (def
), &index
, &offset
,
2594 NULL
, &by_ref
, &guaranteed_unmodified
))
2596 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2598 cs
->indirect_info
->offset
= offset
;
2599 cs
->indirect_info
->agg_contents
= 1;
2600 cs
->indirect_info
->by_ref
= by_ref
;
2601 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2605 /* Now we need to try to match the complex pattern of calling a member
2607 if (gimple_code (def
) != GIMPLE_PHI
2608 || gimple_phi_num_args (def
) != 2
2609 || !POINTER_TYPE_P (TREE_TYPE (target
))
2610 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2613 /* First, we need to check whether one of these is a load from a member
2614 pointer that is a parameter to this function. */
2615 tree n1
= PHI_ARG_DEF (def
, 0);
2616 tree n2
= PHI_ARG_DEF (def
, 1);
2617 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2619 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2620 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2623 basic_block bb
, virt_bb
;
2624 basic_block join
= gimple_bb (def
);
2625 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2627 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2630 bb
= EDGE_PRED (join
, 0)->src
;
2631 virt_bb
= gimple_bb (d2
);
2633 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2635 bb
= EDGE_PRED (join
, 1)->src
;
2636 virt_bb
= gimple_bb (d1
);
2641 /* Second, we need to check that the basic blocks are laid out in the way
2642 corresponding to the pattern. */
2644 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2645 || single_pred (virt_bb
) != bb
2646 || single_succ (virt_bb
) != join
)
2649 /* Third, let's see that the branching is done depending on the least
2650 significant bit of the pfn. */
2652 gimple
*branch
= last_stmt (bb
);
2653 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2656 if ((gimple_cond_code (branch
) != NE_EXPR
2657 && gimple_cond_code (branch
) != EQ_EXPR
)
2658 || !integer_zerop (gimple_cond_rhs (branch
)))
2661 tree cond
= gimple_cond_lhs (branch
);
2662 if (!ipa_is_ssa_with_stmt_def (cond
))
2665 def
= SSA_NAME_DEF_STMT (cond
);
2666 if (!is_gimple_assign (def
)
2667 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2668 || !integer_onep (gimple_assign_rhs2 (def
)))
2671 cond
= gimple_assign_rhs1 (def
);
2672 if (!ipa_is_ssa_with_stmt_def (cond
))
2675 def
= SSA_NAME_DEF_STMT (cond
);
2677 if (is_gimple_assign (def
)
2678 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2680 cond
= gimple_assign_rhs1 (def
);
2681 if (!ipa_is_ssa_with_stmt_def (cond
))
2683 def
= SSA_NAME_DEF_STMT (cond
);
2687 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2688 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2689 == ptrmemfunc_vbit_in_delta
),
2694 index
= ipa_get_param_decl_index (info
, rec
);
2696 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2698 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2700 cs
->indirect_info
->offset
= offset
;
2701 cs
->indirect_info
->agg_contents
= 1;
2702 cs
->indirect_info
->member_ptr
= 1;
2703 cs
->indirect_info
->guaranteed_unmodified
= 1;
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
                               gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
                                  call))
        return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
                              call, anc_offset))
        return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call,
                                     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    if (!is_gimple_debug (USE_STMT (use_p)))
                      {
                        controlled_uses = IPA_UNDESCRIBED_USE;
                        break;
                      }
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
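
/* For instance (a made-up example, not taken from any particular testcase),
   in

     static void foo (int *p)
     {
       bar (p);
       bar (p);
     }

   the default definition of P has exactly two uses, both of them arguments of
   calls, so the loop above would typically record two controlled uses for the
   parameter; any other kind of use (a load, a store, a comparison) would force
   the count to IPA_UNDESCRIBED_USE instead.  */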
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF_GET_CREATE (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
        {
          ipa_set_param_used (info, i, true);
          ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
        }
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
3023 /* Update the jump functions associated with call graph edge E when the call
3024 graph edge CS is being inlined, assuming that E->caller is already (possibly
3025 indirectly) inlined into CS->callee and that E has not been inlined. */
3028 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
3029 struct cgraph_edge
*e
)
3031 class ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
3032 class ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3035 int count
= ipa_get_cs_argument_count (args
);
3038 for (i
= 0; i
< count
; i
++)
3040 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
3041 class ipa_polymorphic_call_context
*dst_ctx
3042 = ipa_get_ith_polymorhic_call_context (args
, i
);
3046 struct ipa_agg_jf_item
*item
;
3049 FOR_EACH_VEC_ELT (*dst
->agg
.items
, j
, item
)
3052 struct ipa_jump_func
*src
;
3054 if (item
->jftype
!= IPA_JF_PASS_THROUGH
3055 && item
->jftype
!= IPA_JF_LOAD_AGG
)
3058 dst_fid
= item
->value
.pass_through
.formal_id
;
3059 if (!top
|| dst_fid
>= ipa_get_cs_argument_count (top
))
3061 item
->jftype
= IPA_JF_UNKNOWN
;
3065 item
->value
.pass_through
.formal_id
= -1;
3066 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3067 if (src
->type
== IPA_JF_CONST
)
3069 if (item
->jftype
== IPA_JF_PASS_THROUGH
3070 && item
->value
.pass_through
.operation
== NOP_EXPR
)
3072 item
->jftype
= IPA_JF_CONST
;
3073 item
->value
.constant
= src
->value
.constant
.value
;
3077 else if (src
->type
== IPA_JF_PASS_THROUGH
3078 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3080 if (item
->jftype
== IPA_JF_PASS_THROUGH
3081 || !item
->value
.load_agg
.by_ref
3082 || src
->value
.pass_through
.agg_preserved
)
3083 item
->value
.pass_through
.formal_id
3084 = src
->value
.pass_through
.formal_id
;
3086 else if (src
->type
== IPA_JF_ANCESTOR
)
3088 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
3090 if (!src
->value
.ancestor
.offset
)
3091 item
->value
.pass_through
.formal_id
3092 = src
->value
.ancestor
.formal_id
;
3094 else if (src
->value
.ancestor
.agg_preserved
)
3096 gcc_checking_assert (item
->value
.load_agg
.by_ref
);
3098 item
->value
.pass_through
.formal_id
3099 = src
->value
.ancestor
.formal_id
;
3100 item
->value
.load_agg
.offset
3101 += src
->value
.ancestor
.offset
;
3105 if (item
->value
.pass_through
.formal_id
< 0)
3106 item
->jftype
= IPA_JF_UNKNOWN
;
3112 ipa_set_jf_unknown (dst
);
3116 if (dst
->type
== IPA_JF_ANCESTOR
)
3118 struct ipa_jump_func
*src
;
3119 int dst_fid
= dst
->value
.ancestor
.formal_id
;
3120 class ipa_polymorphic_call_context
*src_ctx
3121 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3123 /* Variable number of arguments can cause havoc if we try to access
3124 one that does not exist in the inlined edge. So make sure we
3126 if (dst_fid
>= ipa_get_cs_argument_count (top
))
3128 ipa_set_jf_unknown (dst
);
3132 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3134 if (src_ctx
&& !src_ctx
->useless_p ())
3136 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3138 /* TODO: Make type preserved safe WRT contexts. */
3139 if (!ipa_get_jf_ancestor_type_preserved (dst
))
3140 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3141 ctx
.offset_by (dst
->value
.ancestor
.offset
);
3142 if (!ctx
.useless_p ())
3146 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3148 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3151 dst_ctx
->combine_with (ctx
);
3155 /* Parameter and argument in ancestor jump function must be pointer
3156 type, which means access to aggregate must be by-reference. */
3157 gcc_assert (!src
->agg
.items
|| src
->agg
.by_ref
);
3159 if (src
->agg
.items
&& dst
->value
.ancestor
.agg_preserved
)
3161 struct ipa_agg_jf_item
*item
;
3164 /* Currently we do not produce clobber aggregate jump functions,
3165 replace with merging when we do. */
3166 gcc_assert (!dst
->agg
.items
);
3168 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3169 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3170 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
3171 item
->offset
-= dst
->value
.ancestor
.offset
;
3174 if (src
->type
== IPA_JF_PASS_THROUGH
3175 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3177 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
3178 dst
->value
.ancestor
.agg_preserved
&=
3179 src
->value
.pass_through
.agg_preserved
;
3181 else if (src
->type
== IPA_JF_ANCESTOR
)
3183 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
3184 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
3185 dst
->value
.ancestor
.agg_preserved
&=
3186 src
->value
.ancestor
.agg_preserved
;
3189 ipa_set_jf_unknown (dst
);
3191 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
3193 struct ipa_jump_func
*src
;
3194 /* We must check range due to calls with variable number of arguments
3195 and we cannot combine jump functions with operations. */
3196 if (dst
->value
.pass_through
.operation
== NOP_EXPR
3197 && (top
&& dst
->value
.pass_through
.formal_id
3198 < ipa_get_cs_argument_count (top
)))
3200 int dst_fid
= dst
->value
.pass_through
.formal_id
;
3201 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3202 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
3203 class ipa_polymorphic_call_context
*src_ctx
3204 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3206 if (src_ctx
&& !src_ctx
->useless_p ())
3208 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3210 /* TODO: Make type preserved safe WRT contexts. */
3211 if (!ipa_get_jf_pass_through_type_preserved (dst
))
3212 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3213 if (!ctx
.useless_p ())
3217 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3219 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3221 dst_ctx
->combine_with (ctx
);
3226 case IPA_JF_UNKNOWN
:
3227 ipa_set_jf_unknown (dst
);
3230 ipa_set_jf_cst_copy (dst
, src
);
3233 case IPA_JF_PASS_THROUGH
:
3235 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
3236 enum tree_code operation
;
3237 operation
= ipa_get_jf_pass_through_operation (src
);
3239 if (operation
== NOP_EXPR
)
3243 && ipa_get_jf_pass_through_agg_preserved (src
);
3244 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
3246 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
3247 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
3250 tree operand
= ipa_get_jf_pass_through_operand (src
);
3251 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
3256 case IPA_JF_ANCESTOR
:
3260 && ipa_get_jf_ancestor_agg_preserved (src
);
3261 ipa_set_ancestor_jf (dst
,
3262 ipa_get_jf_ancestor_offset (src
),
3263 ipa_get_jf_ancestor_formal_id (src
),
3272 && (dst_agg_p
|| !src
->agg
.by_ref
))
3274 /* Currently we do not produce clobber aggregate jump
3275 functions, replace with merging when we do. */
3276 gcc_assert (!dst
->agg
.items
);
3278 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3279 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3283 ipa_set_jf_unknown (dst
);
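
/* To make the intent of the code above more concrete, here is a rough,
   made-up example (the function names are hypothetical).  If B contains

     void b (int y) { c (y); }

   then the edge b->c carries a simple pass-through jump function referring to
   formal 0 of B.  When an edge a->b whose jump function is a constant (say,
   the argument is always 5) is inlined, the pass-through on the edge to C no
   longer has a meaning in terms of B's formals, so it is re-expressed in terms
   of A's call: here it would typically be combined into a constant jump
   function with value 5, and jump functions that cannot be re-expressed are
   degraded to IPA_JF_UNKNOWN.  */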
3288 /* If TARGET is an addr_expr of a function declaration, make it the
3289 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3290 Otherwise, return NULL. */
3292 struct cgraph_edge
*
3293 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
3296 struct cgraph_node
*callee
;
3297 bool unreachable
= false;
3299 if (TREE_CODE (target
) == ADDR_EXPR
)
3300 target
= TREE_OPERAND (target
, 0);
3301 if (TREE_CODE (target
) != FUNCTION_DECL
)
3303 target
= canonicalize_constructor_val (target
, NULL
);
3304 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
3306 /* Member pointer call that goes through a VMT lookup. */
3307 if (ie
->indirect_info
->member_ptr
3308 /* Or if target is not an invariant expression and we do not
3309 know if it will evaulate to function at runtime.
3310 This can happen when folding through &VAR, where &VAR
3311 is IP invariant, but VAR itself is not.
3313 TODO: Revisit this when GCC 5 is branched. It seems that
3314 member_ptr check is not needed and that we may try to fold
3315 the expression and see if VAR is readonly. */
3316 || !is_gimple_ip_invariant (target
))
3318 if (dump_enabled_p ())
3320 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3321 "discovered direct call non-invariant %s\n",
3322 ie
->caller
->dump_name ());
3328 if (dump_enabled_p ())
3330 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3331 "discovered direct call to non-function in %s, "
3332 "making it __builtin_unreachable\n",
3333 ie
->caller
->dump_name ());
3336 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3337 callee
= cgraph_node::get_create (target
);
3341 callee
= cgraph_node::get (target
);
3344 callee
= cgraph_node::get (target
);
3346 /* Because may-edges are not explicitely represented and vtable may be external,
3347 we may create the first reference to the object in the unit. */
3348 if (!callee
|| callee
->inlined_to
)
3351 /* We are better to ensure we can refer to it.
3352 In the case of static functions we are out of luck, since we already
3353 removed its body. In the case of public functions we may or may
3354 not introduce the reference. */
3355 if (!canonicalize_constructor_val (target
, NULL
)
3356 || !TREE_PUBLIC (target
))
3359 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
3360 "(%s -> %s) but cannot refer to it. Giving up.\n",
3361 ie
->caller
->dump_name (),
3362 ie
->callee
->dump_name ());
3365 callee
= cgraph_node::get_create (target
);
3368 /* If the edge is already speculated. */
3369 if (speculative
&& ie
->speculative
)
3373 cgraph_edge
*e2
= ie
->speculative_call_for_target (callee
);
3377 fprintf (dump_file
, "ipa-prop: Discovered call to a "
3378 "speculative target (%s -> %s) but the call is "
3379 "already speculated to different target. "
3381 ie
->caller
->dump_name (), callee
->dump_name ());
3387 "ipa-prop: Discovered call to a speculative target "
3388 "(%s -> %s) this agree with previous speculation.\n",
3389 ie
->caller
->dump_name (), callee
->dump_name ());
3395 if (!dbg_cnt (devirt
))
3398 ipa_check_create_node_params ();
3400 /* We cannot make edges to inline clones. It is bug that someone removed
3401 the cgraph node too early. */
3402 gcc_assert (!callee
->inlined_to
);
3404 if (dump_file
&& !unreachable
)
3406 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
3407 "(%s -> %s), for stmt ",
3408 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
3409 speculative
? "speculative" : "known",
3410 ie
->caller
->dump_name (),
3411 callee
->dump_name ());
3413 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
3415 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
3417 if (dump_enabled_p ())
3419 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3420 "converting indirect call in %s to direct call to %s\n",
3421 ie
->caller
->dump_name (), callee
->dump_name ());
3425 struct cgraph_edge
*orig
= ie
;
3426 ie
= cgraph_edge::make_direct (ie
, callee
);
3427 /* If we resolved speculative edge the cost is already up to date
3428 for direct call (adjusted by inline_edge_duplication_hook). */
3431 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
3432 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
3433 - eni_size_weights
.call_cost
);
3434 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
3435 - eni_time_weights
.call_cost
);
3440 if (!callee
->can_be_discarded_p ())
3443 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
3447 /* make_speculative will update ie's cost to direct call cost. */
3448 ie
= ie
->make_speculative
3449 (callee
, ie
->count
.apply_scale (8, 10));
3455 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3456 CONSTRUCTOR and return it. Return NULL if the search fails for some
3460 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3462 tree type
= TREE_TYPE (constructor
);
3463 if (TREE_CODE (type
) != ARRAY_TYPE
3464 && TREE_CODE (type
) != RECORD_TYPE
)
3469 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3471 HOST_WIDE_INT elt_offset
;
3472 if (TREE_CODE (type
) == ARRAY_TYPE
)
3475 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3476 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3480 if (TREE_CODE (index
) == RANGE_EXPR
)
3481 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3483 off
= wi::to_offset (index
);
3484 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3486 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3487 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3488 off
= wi::sext (off
- wi::to_offset (low_bound
),
3489 TYPE_PRECISION (TREE_TYPE (index
)));
3491 off
*= wi::to_offset (unit_size
);
3492 /* ??? Handle more than just the first index of a
3496 off
= wi::to_offset (unit_size
) * ix
;
3498 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3499 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3501 elt_offset
= off
.to_shwi ();
3503 else if (TREE_CODE (type
) == RECORD_TYPE
)
3505 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3506 if (DECL_BIT_FIELD (index
))
3508 elt_offset
= int_bit_position (index
);
3513 if (elt_offset
> req_offset
)
3516 if (TREE_CODE (val
) == CONSTRUCTOR
)
3517 return find_constructor_constant_at_offset (val
,
3518 req_offset
- elt_offset
);
3520 if (elt_offset
== req_offset
3521 && is_gimple_reg_type (TREE_TYPE (val
))
3522 && is_gimple_ip_invariant (val
))
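
/* As a small illustration (the variable name is made up), given

     static const int table[4] = { 10, 20, 30, 40 };

   a request for the value at offset 64 bits within the initializer of table
   would walk the constructor elements, compute each element's bit offset from
   its index and the element size, and return the constant 30.  */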
3528 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3529 invariant from a static constructor and if so, return it. Otherwise return
3533 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3537 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3539 scalar
= TREE_OPERAND (scalar
, 0);
3543 || !is_global_var (scalar
)
3544 || !TREE_READONLY (scalar
)
3545 || !DECL_INITIAL (scalar
)
3546 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3549 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3552 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3553 static initializer of SCALAR (which can be NULL) for the given OFFSET or
3554 return NULL if there is none. BY_REF specifies whether the value has to be
3555 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3556 the boolean it points to is set to true if the value comes from an
3557 initializer of a constant. */
3560 ipa_find_agg_cst_for_param (struct ipa_agg_value_set
*agg
, tree scalar
,
3561 HOST_WIDE_INT offset
, bool by_ref
,
3562 bool *from_global_constant
)
3564 struct ipa_agg_value
*item
;
3569 tree res
= ipa_find_agg_cst_from_init (scalar
, offset
, by_ref
);
3572 if (from_global_constant
)
3573 *from_global_constant
= true;
3579 || by_ref
!= agg
->by_ref
)
3582 FOR_EACH_VEC_ELT (agg
->items
, i
, item
)
3583 if (item
->offset
== offset
)
3585 /* Currently we do not have clobber values, return NULL for them once
3587 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
3588 if (from_global_constant
)
3589 *from_global_constant
= false;
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
                                           origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
             origin->caller->dump_name (), symbol->dump_name ());
  return true;
}

/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
        return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
3673 /* Try to find a destination for indirect edge IE that corresponds to a simple
3674 call or a call of a member function pointer and where the destination is a
3675 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3676 the type of the parameter to which the result of JFUNC is passed. If it can
3677 be determined, return the newly direct edge, otherwise return NULL.
3678 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3681 static struct cgraph_edge
*
3682 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3683 struct ipa_jump_func
*jfunc
, tree target_type
,
3684 struct cgraph_node
*new_root
,
3685 class ipa_node_params
*new_root_info
)
3687 struct cgraph_edge
*cs
;
3689 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3690 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3693 bool from_global_constant
;
3694 ipa_agg_value_set agg
= ipa_agg_value_set_from_jfunc (new_root_info
,
3697 target
= ipa_find_agg_cst_for_param (&agg
, scalar
,
3698 ie
->indirect_info
->offset
,
3699 ie
->indirect_info
->by_ref
,
3700 &from_global_constant
);
3703 && !from_global_constant
3704 && !ie
->indirect_info
->guaranteed_unmodified
)
3711 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3713 if (cs
&& !agg_contents
)
3716 gcc_checking_assert (cs
->callee
3718 || jfunc
->type
!= IPA_JF_CONST
3719 || !cgraph_node_for_jfunc (jfunc
)
3720 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
3721 ok
= try_decrement_rdesc_refcount (jfunc
);
3722 gcc_checking_assert (ok
);
3728 /* Return the target to be used in cases of impossible devirtualization. IE
3729 and target (the latter can be NULL) are dumped when dumping is enabled. */
3732 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3738 "Type inconsistent devirtualization: %s->%s\n",
3739 ie
->caller
->dump_name (),
3740 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3743 "No devirtualization target in %s\n",
3744 ie
->caller
->dump_name ());
3746 tree new_target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3747 cgraph_node::get_create (new_target
);
3751 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3752 call based on a formal parameter which is described by jump function JFUNC
3753 and if it can be determined, make it direct and return the direct edge.
3754 Otherwise, return NULL. CTX describes the polymorphic context that the
3755 parameter the call is based on brings along with it. NEW_ROOT and
3756 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3759 static struct cgraph_edge
*
3760 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3761 struct ipa_jump_func
*jfunc
,
3762 class ipa_polymorphic_call_context ctx
,
3763 struct cgraph_node
*new_root
,
3764 class ipa_node_params
*new_root_info
)
3767 bool speculative
= false;
3769 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3772 gcc_assert (!ie
->indirect_info
->by_ref
);
3774 /* Try to do lookup via known virtual table pointer value. */
3775 if (!ie
->indirect_info
->vptr_changed
3776 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3779 unsigned HOST_WIDE_INT offset
;
3780 tree scalar
= (jfunc
->type
== IPA_JF_CONST
) ? ipa_get_jf_constant (jfunc
)
3782 ipa_agg_value_set agg
= ipa_agg_value_set_from_jfunc (new_root_info
,
3785 tree t
= ipa_find_agg_cst_for_param (&agg
, scalar
,
3786 ie
->indirect_info
->offset
,
3789 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3792 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3793 vtable
, offset
, &can_refer
);
3797 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
)
3798 || !possible_polymorphic_call_target_p
3799 (ie
, cgraph_node::get (t
)))
3801 /* Do not speculate builtin_unreachable, it is stupid! */
3802 if (!ie
->indirect_info
->vptr_changed
)
3803 target
= ipa_impossible_devirt_target (ie
, target
);
3810 speculative
= ie
->indirect_info
->vptr_changed
;
3816 ipa_polymorphic_call_context
ie_context (ie
);
3817 vec
<cgraph_node
*>targets
;
3820 ctx
.offset_by (ie
->indirect_info
->offset
);
3821 if (ie
->indirect_info
->vptr_changed
)
3822 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3823 ie
->indirect_info
->otr_type
);
3824 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3825 targets
= possible_polymorphic_call_targets
3826 (ie
->indirect_info
->otr_type
,
3827 ie
->indirect_info
->otr_token
,
3829 if (final
&& targets
.length () <= 1)
3831 speculative
= false;
3832 if (targets
.length () == 1)
3833 target
= targets
[0]->decl
;
3835 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3837 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3838 && !ie
->speculative
&& ie
->maybe_hot_p ())
3841 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3842 ie
->indirect_info
->otr_token
,
3843 ie
->indirect_info
->context
);
3853 if (!possible_polymorphic_call_target_p
3854 (ie
, cgraph_node::get_create (target
)))
3858 target
= ipa_impossible_devirt_target (ie
, target
);
3860 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
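
/* To illustrate what the lookup above can achieve, consider a made-up C++
   example (class and function names are hypothetical):

     struct S { virtual int f () { return 1; } };

     int call (S *p) { return p->f (); }
     int user () { S s; return call (&s); }

   When the indirect edge for p->f () in call is analyzed in the context of
   the call from user, the polymorphic context pins the dynamic type down to
   S, so the code above can resolve the OBJ_TYPE_REF to S::f and turn the
   indirect edge into a direct (or speculative) one.  */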
3866 /* Update the param called notes associated with NODE when CS is being inlined,
3867 assuming NODE is (potentially indirectly) inlined into CS->callee.
3868 Moreover, if the callee is discovered to be constant, create a new cgraph
3869 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3870 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3873 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3874 struct cgraph_node
*node
,
3875 vec
<cgraph_edge
*> *new_edges
)
3877 class ipa_edge_args
*top
;
3878 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3879 struct cgraph_node
*new_root
;
3880 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3883 ipa_check_create_edge_args ();
3884 top
= IPA_EDGE_REF (cs
);
3885 new_root
= cs
->caller
->inlined_to
3886 ? cs
->caller
->inlined_to
: cs
->caller
;
3887 new_root_info
= IPA_NODE_REF (new_root
);
3888 inlined_node_info
= IPA_NODE_REF (cs
->callee
->function_symbol ());
3890 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3892 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3893 struct ipa_jump_func
*jfunc
;
3896 next_ie
= ie
->next_callee
;
3898 if (ici
->param_index
== -1)
3901 /* We must check range due to calls with variable number of arguments: */
3902 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
3904 ici
->param_index
= -1;
3908 param_index
= ici
->param_index
;
3909 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3911 auto_vec
<cgraph_node
*, 4> spec_targets
;
3912 if (ie
->speculative
)
3913 for (cgraph_edge
*direct
= ie
->first_speculative_call_target ();
3915 direct
= direct
->next_speculative_call_target ())
3916 spec_targets
.safe_push (direct
->callee
);
3918 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3919 new_direct_edge
= NULL
;
3920 else if (ici
->polymorphic
)
3922 ipa_polymorphic_call_context ctx
;
3923 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3924 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
,
3930 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3931 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3937 /* If speculation was removed, then we need to do nothing. */
3938 if (new_direct_edge
&& new_direct_edge
!= ie
3939 && spec_targets
.contains (new_direct_edge
->callee
))
3941 new_direct_edge
->indirect_inlining_edge
= 1;
3942 top
= IPA_EDGE_REF (cs
);
3944 if (!new_direct_edge
->speculative
)
3947 else if (new_direct_edge
)
3949 new_direct_edge
->indirect_inlining_edge
= 1;
3952 new_edges
->safe_push (new_direct_edge
);
3955 top
= IPA_EDGE_REF (cs
);
3956 /* If speculative edge was introduced we still need to update
3957 call info of the indirect edge. */
3958 if (!new_direct_edge
->speculative
)
3961 if (jfunc
->type
== IPA_JF_PASS_THROUGH
3962 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
3964 if (ici
->agg_contents
3965 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
3966 && !ici
->polymorphic
)
3967 ici
->param_index
= -1;
3970 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
3971 if (ici
->polymorphic
3972 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
3973 ici
->vptr_changed
= true;
3974 ipa_set_param_used_by_indirect_call (new_root_info
,
3975 ici
->param_index
, true);
3976 if (ici
->polymorphic
)
3977 ipa_set_param_used_by_polymorphic_call (new_root_info
,
3978 ici
->param_index
, true);
3981 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
3983 if (ici
->agg_contents
3984 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
3985 && !ici
->polymorphic
)
3986 ici
->param_index
= -1;
3989 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
3990 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
3991 if (ici
->polymorphic
3992 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
3993 ici
->vptr_changed
= true;
3994 ipa_set_param_used_by_indirect_call (new_root_info
,
3995 ici
->param_index
, true);
3996 if (ici
->polymorphic
)
3997 ipa_set_param_used_by_polymorphic_call (new_root_info
,
3998 ici
->param_index
, true);
4002 /* Either we can find a destination for this edge now or never. */
4003 ici
->param_index
= -1;
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
                                   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
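
/* A quick worked example of the combination rule above: combining 2 and 3
   yields 5 (the counted uses simply add up after inlining), while combining 2
   with IPA_UNDESCRIBED_USE yields IPA_UNDESCRIBED_USE, because once one side
   has lost track of its uses the combined count cannot be trusted either.  */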
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */
4053 propagate_controlled_uses (struct cgraph_edge
*cs
)
4055 class ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
4058 struct cgraph_node
*new_root
= cs
->caller
->inlined_to
4059 ? cs
->caller
->inlined_to
: cs
->caller
;
4060 class ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
4061 class ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
4067 count
= MIN (ipa_get_cs_argument_count (args
),
4068 ipa_get_param_count (old_root_info
));
4069 for (i
= 0; i
< count
; i
++)
4071 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4072 struct ipa_cst_ref_desc
*rdesc
;
4074 if (jf
->type
== IPA_JF_PASS_THROUGH
)
4077 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
4078 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
4079 d
= ipa_get_controlled_uses (old_root_info
, i
);
4081 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
4082 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
4083 c
= combine_controlled_uses_counters (c
, d
);
4084 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
4085 if (c
== 0 && new_root_info
->ipcp_orig_node
)
4087 struct cgraph_node
*n
;
4088 struct ipa_ref
*ref
;
4089 tree t
= new_root_info
->known_csts
[src_idx
];
4091 if (t
&& TREE_CODE (t
) == ADDR_EXPR
4092 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
4093 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
4094 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
4097 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
4098 "reference from %s to %s.\n",
4099 new_root
->dump_name (),
4101 ref
->remove_reference ();
4105 else if (jf
->type
== IPA_JF_CONST
4106 && (rdesc
= jfunc_rdesc_usable (jf
)))
4108 int d
= ipa_get_controlled_uses (old_root_info
, i
);
4109 int c
= rdesc
->refcount
;
4110 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
4111 if (rdesc
->refcount
== 0)
4113 tree cst
= ipa_get_jf_constant (jf
);
4114 struct cgraph_node
*n
;
4115 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
4116 && TREE_CODE (TREE_OPERAND (cst
, 0))
4118 n
= cgraph_node::get (TREE_OPERAND (cst
, 0));
4121 struct cgraph_node
*clone
;
4123 ok
= remove_described_reference (n
, rdesc
);
4124 gcc_checking_assert (ok
);
4127 while (clone
->inlined_to
4128 && clone
->ipcp_clone
4129 && clone
!= rdesc
->cs
->caller
)
4131 struct ipa_ref
*ref
;
4132 ref
= clone
->find_reference (n
, NULL
, 0);
4136 fprintf (dump_file
, "ipa-prop: Removing "
4137 "cloning-created reference "
4139 clone
->dump_name (),
4141 ref
->remove_reference ();
4143 clone
= clone
->callers
->caller
;
4150 for (i
= ipa_get_param_count (old_root_info
);
4151 i
< ipa_get_cs_argument_count (args
);
4154 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4156 if (jf
->type
== IPA_JF_CONST
)
4158 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
4160 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
4162 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
4163 ipa_set_controlled_uses (new_root_info
,
4164 jf
->value
.pass_through
.formal_id
,
4165 IPA_UNDESCRIBED_USE
);
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
        {
          struct ipa_jump_func *jf;
          int i;
          FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
            if (jf->type == IPA_JF_CONST
                && ipa_get_jf_constant_rdesc (jf))
              {
                ok = false;
                break;
              }
        }
      if (ok)
        ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
4215 /* Ensure that array of edge arguments infos is big enough to accommodate a
4216 structure for all edges and reallocates it if not. Also, allocate
4217 associated hash tables is they do not already exist. */
4220 ipa_check_create_edge_args (void)
4222 if (!ipa_edge_args_sum
)
4224 = (new (ggc_alloc_no_dtor
<ipa_edge_args_sum_t
> ())
4225 ipa_edge_args_sum_t (symtab
, true));
4226 if (!ipa_bits_hash_table
)
4227 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4228 if (!ipa_vr_hash_table
)
4229 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4232 /* Free all ipa_edge structures. */
4235 ipa_free_all_edge_args (void)
4237 if (!ipa_edge_args_sum
)
4240 ggc_delete (ipa_edge_args_sum
);
4241 ipa_edge_args_sum
= NULL
;
4244 /* Free all ipa_node_params structures. */
4247 ipa_free_all_node_params (void)
4249 if (ipa_node_params_sum
)
4250 ggc_delete (ipa_node_params_sum
);
4251 ipa_node_params_sum
= NULL
;
4254 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4255 tables if they do not already exist. */
4258 ipcp_transformation_initialize (void)
4260 if (!ipa_bits_hash_table
)
4261 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4262 if (!ipa_vr_hash_table
)
4263 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4264 if (ipcp_transformation_sum
== NULL
)
4266 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
4267 ipcp_transformation_sum
->disable_insertion_hook ();
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}

/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}

/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
			? dst_rdesc->cs->caller->inlined_to
			: dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
					    ? dst->caller->inlined_to
					    : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}

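/* Editorial sketch (not part of the original file): the next_duplicate chain
   manipulated above links every ipa_cst_ref_desc copy created for clones of
   one original call site.  Assuming a stand-alone helper were wanted, picking
   the descriptor that belongs to a particular tree of inline clones would
   look roughly like the loop in the last branch above; the helper name is
   hypothetical.  */
#if 0
static ipa_cst_ref_desc *
find_rdesc_for_clone_tree (ipa_cst_ref_desc *src_rdesc, cgraph_node *root)
{
  /* Walk the duplicates and return the one whose recording call site lies in
     the inline-clone tree rooted at ROOT.  */
  for (ipa_cst_ref_desc *d = src_rdesc->next_duplicate; d;
       d = d->next_duplicate)
    {
      cgraph_node *top = d->cs->caller->inlined_to
			 ? d->cs->caller->inlined_to : d->cs->caller;
      if (top == root)
	return d;
    }
  return NULL;
}
#endif
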
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}

/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}

/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  ipa_agg_replacement_value *agg = src_trans->agg_values,
			    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}

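/* Editorial sketch (not part of the original file): the writer above packs
   the jump function type and the ADDR_EXPR shortcut flag into one unsigned
   value, and the reader below undoes the same encoding.  A minimal round-trip
   of that packing, with an illustrative value:  */
#if 0
unsigned HOST_WIDE_INT encoded
  = (unsigned HOST_WIDE_INT) IPA_JF_CONST * 2 + 1;	/* write side */
bool is_addr_expr = (encoded & 1) != 0;			/* read side: flag */
enum jump_func_type jftype
  = (enum jump_func_type) (encoded / 2);		/* read side: type */
#endif
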
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
      }
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
							VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails
				? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}

/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
					&len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  clone_info *cinfo = clone_info::get (node);

  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  for (v = aggval; v; v = v->next)
    {
      gcc_checking_assert (v->index >= 0);

      if ((unsigned) v->index < new_indices.length ())
	v->index = new_indices[v->index];
      else
	/* This can happen if we know about a constant passed by reference by
	   an argument which is never actually used for anything, let alone
	   loading that constant.  */
	v->index = -1;
    }
}

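/* Editorial example (not part of the original file): get_updated_indices
   produces a map from pre-IPA parameter indices to indices in the clone.
   Roughly, if a function originally had parameters (a, b, c) and the clone
   dropped b, the vector would be { 0, -1, 1 }: replacements for a and c are
   remapped to indices 0 and 1, while a replacement for the removed b gets a
   negative index and is ignored from then on, as in the loop above.  */
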
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}

/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}

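/* Editorial sketch (not part of the original file): a later GIMPLE pass that
   wants the known bits IPA-CP discovered for a parameter could query the
   summary roughly like this; the caller below is hypothetical and error
   handling is omitted.  */
#if 0
static void
note_param_bits (tree parm)
{
  tree value;
  widest_int mask;
  /* Returns false when nothing was recorded for PARM in this function.  */
  if (ipcp_get_parm_bits (parm, &value, &mask))
    {
      /* Bits set in MASK are unknown; bits clear in MASK are known to equal
	 the corresponding bits of VALUE.  */
    }
}
#endif
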
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Adjusting align: %u, misalign: %u\n", align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

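/* Editorial example (not part of the original file): for pointer parameters
   the known-bits pair is turned into an (align, misalign) pair above.  With
   mask = 0xf8 (low three bits known) and value = 0x04, align = mask & -mask
   yields 8 and misalign = value & (align - 1) yields 4, i.e. the pointer is
   known to be congruent to 4 modulo 8.  A stand-alone rendering of the same
   arithmetic, with illustrative numbers:  */
#if 0
unsigned HOST_WIDE_INT mask = 0xf8, value = 0x04;
unsigned align = mask & -mask;		  /* lowest set bit of MASK -> 8 */
unsigned misalign = value & (align - 1);  /* known low bits of VALUE -> 4 */
#endif
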
/* Return true if the range of values described by this ipa_vr is known to
   exclude zero for EXPR_TYPE.  */

bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}

/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}

/* Return true if OTHER describes same agg value.  */
bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
	 && operand_equal_p (value, other.value, 0);
}

/* Destructor also removing individual aggregate values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}

#include "gt-ipa-prop.h"