/* Interprocedural analyses.
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return (a->equal_p (*b)
	    && types_compatible_p (a->type (), b->type ()));
  }
  static const bool empty_zero_p = true;
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-really-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as special cases, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       D.1845_2->bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structures, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by a certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that the offset of the passed argument
   is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct a jump function for the RHS of the
   assignment, from which the content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of the
   simple pass-through jump function.

     *(q_5 + 4) = *(p_3(D) + 28) op 1;

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */
static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is an example), and GIMPLE_BINARY_RHS does not
	     imply tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an
	     operation with one operand, here we only allow tcc_unary
	     operations to avoid possible problems.  Then we can use
	     (opclass == tcc_unary) or not to distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to the
   object is expected to be in the form of a MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constants or values that are derived from caller's
   formal parameters in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operands (their
     definitions dominate the call), and as long as it is confident that the
     content of the aggregate is affected by the definition of the virtual
     operand, it builds a sorted linked list of ipa_agg_jf_list describing
     that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
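/* NOTE: this is the lookup-or-allocate half of the hash-consing scheme set up
   by ipa_bit_ggc_hash_traits near the top of this file; callers such as
   ipa_set_jfunc_bits below never allocate ipa_bits directly.  */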
/* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use a
   hash table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = new (ggc_alloc<value_range> ()) value_range;
  *vr = *tmp;
  *slot = vr;

  return vr;
}
2197 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2198 equiv set. Use hash table in order to avoid creating multiple same copies of
2201 static value_range
*
2202 ipa_get_value_range (enum value_range_kind kind
, tree min
, tree max
)
2204 value_range
tmp (min
, max
, kind
);
2205 return ipa_get_value_range (&tmp
);
2208 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2209 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2210 same value_range structures. */
2213 ipa_set_jfunc_vr (ipa_jump_func
*jf
, enum value_range_kind type
,
2216 jf
->m_vr
= ipa_get_value_range (type
, min
, max
);
2219 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2220 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2223 ipa_set_jfunc_vr (ipa_jump_func
*jf
, value_range
*tmp
)
2225 jf
->m_vr
= ipa_get_value_range (tmp
);
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          class ipa_polymorphic_call_context context (cs->caller->decl,
                                                      arg, cs->call_stmt,
                                                      &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
                                    &fbi->aa_walk_budget);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      value_range vr;
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          bool addr_nonzero = false;
          bool strict_overflow = false;

          if (TREE_CODE (arg) == SSA_NAME
              && param_type
              && get_range_query (cfun)->range_of_expr (vr, arg)
              && vr.nonzero_p ())
            addr_nonzero = true;
          else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
            addr_nonzero = true;

          if (addr_nonzero)
            {
              tree z = build_int_cst (TREE_TYPE (arg), 0);
              ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
            }
          else
            gcc_assert (!jfunc->m_vr);
        }
      else
        {
          if (TREE_CODE (arg) == SSA_NAME
              && param_type
              && get_range_query (cfun)->range_of_expr (vr, arg)
              && !vr.undefined_p ())
            {
              value_range resvr;
              range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
                                     &vr, TREE_TYPE (arg));
              if (!resvr.undefined_p () && !resvr.varying_p ())
                ipa_set_jfunc_vr (jfunc, &resvr);
              else
                gcc_assert (!jfunc->m_vr);
            }
          else
            gcc_assert (!jfunc->m_vr);
        }

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
          && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
        {
          if (TREE_CODE (arg) == SSA_NAME)
            ipa_set_jfunc_bits (jfunc, 0,
                                widest_int::from (get_nonzero_bits (arg),
                                                  TYPE_SIGN (TREE_TYPE (arg))));
          else
            ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
        }
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned HOST_WIDE_INT bitpos;
          unsigned align;

          get_pointer_alignment_1 (arg, &align, &bitpos);
          widest_int mask = wi::bit_and_not
            (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
             align / BITS_PER_UNIT - 1);
          widest_int value = bitpos / BITS_PER_UNIT;
          ipa_set_jfunc_bits (jfunc, value, mask);
        }
      else
        gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
          || (VAR_P (arg)
              && is_global_var (arg)
              && TREE_READONLY (arg)))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple *stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is a pointer, we cannot use its type to determine the type of
         the aggregate passed (because type conversions are ignored in
         gimple).  Usually we can safely get the type from the function
         declaration, but in the case of K&R prototypes or variadic functions
         we can try our luck with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object,
         we may better work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
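/* To illustrate the jump functions built above on a hypothetical call site
   (this example is not part of GCC sources):

     extern int global;
     extern void bar (int x, int y, int *p);

     void
     foo (int x)
     {
       bar (x, 7, &global);
     }

   argument 0 would receive a simple pass-through jump function referring to
   formal parameter 0 of foo, argument 1 a constant jump function holding 7,
   and argument 2 a constant jump function holding &global.  */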
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
                                   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee = callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto
              && !gimple_call_fnspec (cs->call_stmt).known_p ())
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  If POLYMORPHIC is true, record it as a
   destination of a polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
                     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
                                tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
                                 gimple_assign_rhs1 (def), &index, &offset,
                                 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
                               gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
                                  call))
        return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
                              call, anc_offset))
        return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
                                                call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
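/* A minimal sketch of the situation handled above (hypothetical example, not
   from GCC sources):

     struct A { virtual int f (); };

     int
     g (A *a)
     {
       return a->f ();
     }

   OBJ_TYPE_REF_OBJECT of the call target is the default definition of the
   parameter A, so the indirect edge gets a note with param_index 0, the
   token and class of the OBJ_TYPE_REF, and the polymorphic flag set.  */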
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call,
                                     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters
   (described in INFO) of the caller given by FBI->NODE.  Currently it only
   checks whether formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    if (!is_gimple_debug (USE_STMT (use_p)))
                      {
                        controlled_uses = IPA_UNDESCRIBED_USE;
                        break;
                      }
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
        {
          ipa_set_param_used (info, i, true);
          ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
        }
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int i;
  int count = ipa_get_cs_argument_count (args);

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
        = ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
            {
              int dst_fid;
              struct ipa_jump_func *src;

              if (item->jftype != IPA_JF_PASS_THROUGH
                  && item->jftype != IPA_JF_LOAD_AGG)
                continue;

              dst_fid = item->value.pass_through.formal_id;
              if (!top || dst_fid >= ipa_get_cs_argument_count (top))
                {
                  item->jftype = IPA_JF_UNKNOWN;
                  continue;
                }

              item->value.pass_through.formal_id = -1;
              src = ipa_get_ith_jump_func (top, dst_fid);
              if (src->type == IPA_JF_CONST)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH
                      && item->value.pass_through.operation == NOP_EXPR)
                    {
                      item->jftype = IPA_JF_CONST;
                      item->value.constant = src->value.constant.value;
                      continue;
                    }
                }
              else if (src->type == IPA_JF_PASS_THROUGH
                       && src->value.pass_through.operation == NOP_EXPR)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH
                      || !item->value.load_agg.by_ref
                      || src->value.pass_through.agg_preserved)
                    item->value.pass_through.formal_id
                      = src->value.pass_through.formal_id;
                }
              else if (src->type == IPA_JF_ANCESTOR)
                {
                  if (item->jftype == IPA_JF_PASS_THROUGH)
                    {
                      if (!src->value.ancestor.offset)
                        item->value.pass_through.formal_id
                          = src->value.ancestor.formal_id;
                    }
                  else if (src->value.ancestor.agg_preserved)
                    {
                      gcc_checking_assert (item->value.load_agg.by_ref);

                      item->value.pass_through.formal_id
                        = src->value.ancestor.formal_id;
                      item->value.load_agg.offset
                        += src->value.ancestor.offset;
                    }
                }

              if (item->value.pass_through.formal_id < 0)
                item->jftype = IPA_JF_UNKNOWN;
            }
        }

      if (!top)
        {
          ipa_set_jf_unknown (dst);
          continue;
        }

      if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
          class ipa_polymorphic_call_context *src_ctx
            = ipa_get_ith_polymorhic_call_context (top, dst_fid);

          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
              ipa_set_jf_unknown (dst);
              continue;
            }

          src = ipa_get_ith_jump_func (top, dst_fid);

          if (src_ctx && !src_ctx->useless_p ())
            {
              class ipa_polymorphic_call_context ctx = *src_ctx;

              /* TODO: Make type preserved safe WRT contexts.  */
              if (!ipa_get_jf_ancestor_type_preserved (dst))
                ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
              ctx.offset_by (dst->value.ancestor.offset);
              if (!ctx.useless_p ())
                {
                  if (!dst_ctx)
                    {
                      vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                             count, true);
                      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                    }

                  dst_ctx->combine_with (ctx);
                }
            }

          /* Parameter and argument in ancestor jump function must be pointer
             type, which means access to aggregate must be by-reference.  */
          gcc_assert (!src->agg.items || src->agg.by_ref);

          if (src->agg.items && dst->value.ancestor.agg_preserved)
            {
              struct ipa_agg_jf_item *item;
              int j;

              /* Currently we do not produce clobber aggregate jump functions,
                 replace with merging when we do.  */
              gcc_assert (!dst->agg.items);

              dst->agg.items = vec_safe_copy (src->agg.items);
              dst->agg.by_ref = src->agg.by_ref;
              FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
                item->offset -= dst->value.ancestor.offset;
            }

          if (src->type == IPA_JF_PASS_THROUGH
              && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id
                = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
              dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
            }
          else
            ipa_set_jf_unknown (dst);
        }
      else if (dst->type == IPA_JF_PASS_THROUGH)
        {
          struct ipa_jump_func *src;
          /* We must check range due to calls with variable number of arguments
             and we cannot combine jump functions with operations.  */
          if (dst->value.pass_through.operation == NOP_EXPR
              && (top && dst->value.pass_through.formal_id
                  < ipa_get_cs_argument_count (top)))
            {
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
              class ipa_polymorphic_call_context *src_ctx
                = ipa_get_ith_polymorhic_call_context (top, dst_fid);

              if (src_ctx && !src_ctx->useless_p ())
                {
                  class ipa_polymorphic_call_context ctx = *src_ctx;

                  /* TODO: Make type preserved safe WRT contexts.  */
                  if (!ipa_get_jf_pass_through_type_preserved (dst))
                    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
                  if (!ctx.useless_p ())
                    {
                      if (!dst_ctx)
                        {
                          vec_safe_grow_cleared
                            (args->polymorphic_call_contexts, count, true);
                          dst_ctx
                            = ipa_get_ith_polymorhic_call_context (args, i);
                        }
                      dst_ctx->combine_with (ctx);
                    }
                }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
                  ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  ipa_set_jf_cst_copy (dst, src);
                  break;

                case IPA_JF_PASS_THROUGH:
                  {
                    int formal_id = ipa_get_jf_pass_through_formal_id (src);
                    enum tree_code operation;
                    operation = ipa_get_jf_pass_through_operation (src);

                    if (operation == NOP_EXPR)
                      {
                        bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
                        ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else if (TREE_CODE_CLASS (operation) == tcc_unary)
                      ipa_set_jf_unary_pass_through (dst, formal_id,
                                                     operation);
                    else
                      {
                        tree operand = ipa_get_jf_pass_through_operand (src);
                        ipa_set_jf_arith_pass_through (dst, formal_id, operand,
                                                       operation);
                      }
                    break;
                  }
                case IPA_JF_ANCESTOR:
                  {
                    bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
                                         ipa_get_jf_ancestor_formal_id (src),
                                         agg_p);
                    break;
                  }
                default:
                  gcc_unreachable ();
                }

              if (src->agg.items
                  && (dst_agg_p || !src->agg.by_ref))
                {
                  /* Currently we do not produce clobber aggregate jump
                     functions, replace with merging when we do.  */
                  gcc_assert (!dst->agg.items);

                  dst->agg.by_ref = src->agg.by_ref;
                  dst->agg.items = vec_safe_copy (src->agg.items);
                }
            }
          else
            ipa_set_jf_unknown (dst);
        }
    }
}
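/* A small worked example of the composition performed above (hypothetical,
   not from GCC sources):

     extern void c (int z);
     static void b (int y) { c (y); }
     void a (int x) { b (x); }

   Before inlining, the edge b -> c carries a simple pass-through jump
   function referring to b's parameter Y (formal_id 0).  When a -> b is
   inlined, that pass-through is combined with the jump function a -> b had
   for argument 0: a pass-through of a's parameter X there turns the result
   into a pass-through of X, while a constant would have turned it into a
   constant jump function.  */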
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
                                bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
          /* Member pointer call that goes through a VMT lookup.  */
          if (ie->indirect_info->member_ptr
              /* Or if target is not an invariant expression and we do not
                 know if it will evaluate to a function at runtime.
                 This can happen when folding through &VAR, where &VAR
                 is IP invariant, but VAR itself is not.

                 TODO: Revisit this when GCC 5 is branched.  It seems that
                 member_ptr check is not needed and that we may try to fold
                 the expression and see if VAR is readonly.  */
              || !is_gimple_ip_invariant (target))
            {
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                                   "discovered direct call non-invariant %s\n",
                                   ie->caller->dump_name ());
                }
              return NULL;
            }

          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                               "discovered direct call to non-function in %s, "
                               "making it __builtin_unreachable\n",
                               ie->caller->dump_name ());
            }

          target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
          callee = cgraph_node::get_create (target);
          unreachable = true;
        }
      else
        callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and the vtable may be
     external, we may create the first reference to the object in the
     unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We had better ensure we can refer to it.
         In the case of static functions we are out of luck, since we already
         removed its body.  In the case of public functions we may or may
         not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
          || !TREE_PUBLIC (target))
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s -> %s) but cannot refer to it.  Giving up.\n",
                     ie->caller->dump_name (),
                     ie->callee->dump_name ());
          return NULL;
        }
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
        {
          cgraph_edge *e2 = ie->speculative_call_for_target (callee);
          if (!e2)
            fprintf (dump_file, "ipa-prop: Discovered call to a "
                     "speculative target (%s -> %s) but the call is "
                     "already speculated to a different target.  "
                     "Giving up.\n",
                     ie->caller->dump_name (), callee->dump_name ());
          else
            fprintf (dump_file,
                     "ipa-prop: Discovered call to a speculative target "
                     "(%s -> %s); this agrees with previous speculation.\n",
                     ie->caller->dump_name (), callee->dump_name ());
        }
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s -> %s), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
               speculative ? "speculative" : "known",
               ie->caller->dump_name (),
               callee->dump_name ());
      if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
                       "converting indirect call in %s to direct call to %s\n",
                       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved speculative edge the cost is already up to date
         for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
        {
          ipa_call_summary *es = ipa_call_summaries->get (ie);
          es->call_stmt_size -= (eni_size_weights.indirect_call_cost
                                 - eni_size_weights.call_cost);
          es->call_stmt_time -= (eni_time_weights.indirect_call_cost
                                 - eni_time_weights.call_cost);
        }
    }
  else
    {
      if (!callee->can_be_discarded_p ())
        {
          cgraph_node *alias;
          alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
          if (alias)
            callee = alias;
        }
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
             (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor,
                                     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          offset_int off;
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

          if (index)
            {
              if (TREE_CODE (index) == RANGE_EXPR)
                off = wi::to_offset (TREE_OPERAND (index, 0));
              else
                off = wi::to_offset (index);
              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
                {
                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
                  off = wi::sext (off - wi::to_offset (low_bound),
                                  TYPE_PRECISION (TREE_TYPE (index)));
                }
              off *= wi::to_offset (unit_size);
              /* ??? Handle more than just the first index of a
                 RANGE_EXPR.  */
            }
          else
            off = wi::to_offset (unit_size) * ix;

          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
            continue;
          elt_offset = off.to_shwi ();
        }
      else if (TREE_CODE (type) == RECORD_TYPE)
        {
          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
          if (DECL_BIT_FIELD (index))
            return NULL;
          elt_offset = int_bit_position (index);
        }
      else
        gcc_unreachable ();

      if (elt_offset > req_offset)
        return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
        return find_constructor_constant_at_offset (val,
                                                    req_offset - elt_offset);

      if (elt_offset == req_offset
          && is_gimple_reg_type (TREE_TYPE (val))
          && is_gimple_ip_invariant (val))
        return val;
    }
  return NULL;
}
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
        return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
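/* For instance (hypothetical example, not from GCC sources), given

     struct S { int a; int b; };
     static const struct S s = { 5, 42 };
     ...
     use (&s);

   a by-reference lookup with SCALAR being &s and OFFSET equal to the bit
   position of field B descends into DECL_INITIAL of s and returns the
   INTEGER_CST 42.  */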
/* Retrieve a value from AGG, a set of known offset/value for an aggregate or
   static initializer of SCALAR (which can be NULL) for the given OFFSET or
   return NULL if there is none.  BY_REF specifies whether the value has to be
   passed by reference or by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then
   the boolean it points to is set to true if the value comes from an
   initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
                            HOST_WIDE_INT offset, bool by_ref,
                            bool *from_global_constant)
{
  struct ipa_agg_value *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
        {
          if (from_global_constant)
            *from_global_constant = true;
          return res;
        }
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
        /* Currently we do not have clobber values, return NULL for them once
           we do.  */
        gcc_checking_assert (is_gimple_ip_invariant (item->value));
        if (from_global_constant)
          *from_global_constant = false;
        return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol,
                            struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
                                           origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
             origin->caller->dump_name (), symbol->dump_name ());
  return true;
}

/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}

/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
        return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it
   can be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
                                  struct ipa_jump_func *jfunc,
                                  tree target_type,
                                  struct cgraph_node *new_root,
                                  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
                                                            new_root,
                                                            &jfunc->agg);
      target = ipa_find_agg_cst_for_param (&agg, scalar,
                                           ie->indirect_info->offset,
                                           ie->indirect_info->by_ref,
                                           &from_global_constant);
      agg.release ();
      if (target
          && !from_global_constant
          && !ie->indirect_info->guaranteed_unmodified)
        return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
                           && (cs != ie
                               || jfunc->type != IPA_JF_CONST
                               || !cgraph_node_for_jfunc (jfunc)
                               || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
        fprintf (dump_file,
                 "Type inconsistent devirtualization: %s->%s\n",
                 ie->caller->dump_name (),
                 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
        fprintf (dump_file,
                 "No devirtualization target in %s\n",
                 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a
   virtual call based on a formal parameter which is described by jump
   function JFUNC and if it can be determined, make it direct and return the
   direct edge.  Otherwise, return NULL.  CTX describes the polymorphic
   context that the parameter the call is based on brings along with it.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
                                   struct ipa_jump_func *jfunc,
                                   class ipa_polymorphic_call_context ctx,
                                   struct cgraph_node *new_root,
                                   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
        : NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
                                                            new_root,
                                                            &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
                                           ie->indirect_info->offset,
                                           true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
        {
          bool can_refer;
          t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
                                                 vtable, offset, &can_refer);
          if (can_refer)
            {
              if (!t
                  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
                  || !possible_polymorphic_call_target_p
                       (ie, cgraph_node::get (t)))
                {
                  /* Do not speculate builtin_unreachable, it is stupid!  */
                  if (!ie->indirect_info->vptr_changed)
                    target = ipa_impossible_devirt_target (ie, target);
                  else
                    target = NULL;
                }
              else
                {
                  target = t;
                  speculative = ie->indirect_info->vptr_changed;
                }
            }
        }
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
                                      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
        target = targets[0]->decl;
      else
        target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
           && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
           && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
                                            ie->indirect_info->otr_token,
                                            ie->indirect_info->context);
      if (n)
        {
          target = n->decl;
          speculative = true;
        }
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
          (ie, cgraph_node::get_create (target)))
        {
          if (speculative)
            return NULL;
          target = ipa_impossible_devirt_target (ie, target);
        }
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
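/* A minimal sketch of a devirtualization the above enables (hypothetical
   example, not from GCC sources):

     struct A { virtual int f () { return 1; } };
     struct B : A { virtual int f () { return 2; } };

     static int call (A *a) { return a->f (); }

     int user () { B b; return call (&b); }

   Once jump functions or the polymorphic context identify the dynamic type
   of *a at the call site as B, the indirect call becomes a direct (or, when
   the vtable pointer may have changed, speculative) call to B::f.  */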
/* Update the param called notes associated with NODE when CS is being
   inlined, assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_node *node,
                                      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info
    = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
        continue;

      /* We must check range due to calls with variable number of
         arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
        {
          ici->param_index = -1;
          continue;
        }

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
        for (cgraph_edge *direct = ie->first_speculative_call_target ();
             direct;
             direct = direct->next_speculative_call_target ())
          spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
        new_direct_edge = NULL;
      else if (ici->polymorphic)
        {
          ipa_polymorphic_call_context ctx;
          ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
          new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
                                                               new_root,
                                                               new_root_info);
        }
      else
        {
          tree target_type = ipa_get_type (inlined_node_info, param_index);
          new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
                                                              target_type,
                                                              new_root,
                                                              new_root_info);
        }

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
          && spec_targets.contains (new_direct_edge->callee))
        {
          new_direct_edge->indirect_inlining_edge = 1;
          res = true;
          if (!new_direct_edge->speculative)
            continue;
        }
      else if (new_direct_edge)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          if (new_edges)
            {
              new_edges->safe_push (new_direct_edge);
              res = true;
            }
          /* If speculative edge was introduced we still need to update
             call info of the indirect edge.  */
          if (!new_direct_edge->speculative)
            continue;
        }
      if (jfunc->type == IPA_JF_PASS_THROUGH
          && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_pass_through_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_pass_through_type_preserved (jfunc))
                ici->vptr_changed = true;
              ipa_set_param_used_by_indirect_call (new_root_info,
                                                   ici->param_index, true);
              if (ici->polymorphic)
                ipa_set_param_used_by_polymorphic_call (new_root_info,
                                                        ici->param_index,
                                                        true);
            }
        }
      else if (jfunc->type == IPA_JF_ANCESTOR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_ancestor_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
              ici->offset += ipa_get_jf_ancestor_offset (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_ancestor_type_preserved (jfunc))
                ici->vptr_changed = true;
              ipa_set_param_used_by_indirect_call (new_root_info,
                                                   ici->param_index, true);
              if (ici->polymorphic)
                ipa_set_param_used_by_polymorphic_call (new_root_info,
                                                        ici->param_index,
                                                        true);
            }
        }
      else
        /* Either we can find a destination for this edge now or never.  */
        ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse the subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
                                   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
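/* A worked example of the arithmetic above: if the inlined callee's formal
   parameter had D = 2 controlled uses and it was fed by a pass-through of
   the caller's parameter with C = 3 controlled uses, then the call itself
   was one of the C counted uses and disappears with inlining, while the
   callee's two uses now belong to the caller's parameter, giving
   3 + 2 - 1 = 4.  If either count is IPA_UNDESCRIBED_USE, the result is
   IPA_UNDESCRIBED_USE as well.  */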
/* Propagate the number of controlled users from CS->callee to the new root
   of the tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
               ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
        {
          int src_idx, c, d;
          src_idx = ipa_get_jf_pass_through_formal_id (jf);
          c = ipa_get_controlled_uses (new_root_info, src_idx);
          d = ipa_get_controlled_uses (old_root_info, i);

          gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
                               == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
          c = combine_controlled_uses_counters (c, d);
          ipa_set_controlled_uses (new_root_info, src_idx, c);
          if (c == 0 && new_root_info->ipcp_orig_node)
            {
              struct cgraph_node *n;
              struct ipa_ref *ref;
              tree t = new_root_info->known_csts[src_idx];

              if (t && TREE_CODE (t) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
                  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
                  && (ref = new_root->find_reference (n, NULL, 0)))
                {
                  if (dump_file)
                    fprintf (dump_file, "ipa-prop: Removing cloning-created "
                             "reference from %s to %s.\n",
                             new_root->dump_name (),
                             n->dump_name ());
                  ref->remove_reference ();
                }
            }
        }
      else if (jf->type == IPA_JF_CONST
               && (rdesc = jfunc_rdesc_usable (jf)))
        {
          int d = ipa_get_controlled_uses (old_root_info, i);
          int c = rdesc->refcount;
          rdesc->refcount = combine_controlled_uses_counters (c, d);
          if (rdesc->refcount == 0)
            {
              tree cst = ipa_get_jf_constant (jf);
              struct cgraph_node *n;
              gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
                                   && TREE_CODE (TREE_OPERAND (cst, 0))
                                   == FUNCTION_DECL);
              n = cgraph_node::get (TREE_OPERAND (cst, 0));
              if (n)
                {
                  struct cgraph_node *clone;
                  bool ok;
                  ok = remove_described_reference (n, rdesc);
                  gcc_checking_assert (ok);

                  clone = cs->caller;
                  while (clone->inlined_to
                         && clone->ipcp_clone
                         && clone != rdesc->cs->caller)
                    {
                      struct ipa_ref *ref;
                      ref = clone->find_reference (n, NULL, 0);
                      if (ref)
                        {
                          if (dump_file)
                            fprintf (dump_file, "ipa-prop: Removing "
                                     "cloning-created reference "
                                     "from %s to %s.\n",
                                     clone->dump_name (),
                                     n->dump_name ());
                          ref->remove_reference ();
                        }
                      clone = clone->callers->caller;
                    }
                }
            }
        }
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
          if (rdesc)
            rdesc->refcount = IPA_UNDESCRIBED_USE;
        }
      else if (jf->type == IPA_JF_PASS_THROUGH)
        ipa_set_controlled_uses (new_root_info,
                                 jf->value.pass_through.formal_id,
                                 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added
   to *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s)
   were created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
        {
          struct ipa_jump_func *jf;
          int i;
          FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
            if (jf->type == IPA_JF_CONST
                && ipa_get_jf_constant_rdesc (jf))
              {
                ok = false;
                break;
              }
        }
      if (ok)
        ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
/* Ensure that the array of edge arguments infos is big enough to accommodate
   a structure for all edges and reallocate it if not.  Also, allocate the
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
         ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table
      = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
4235 /* Free all ipa_edge structures. */
4238 ipa_free_all_edge_args (void)
4240 if (!ipa_edge_args_sum
)
4243 ggc_delete (ipa_edge_args_sum
);
4244 ipa_edge_args_sum
= NULL
;
4247 /* Free all ipa_node_params structures. */
4250 ipa_free_all_node_params (void)
4252 if (ipa_node_params_sum
)
4253 ggc_delete (ipa_node_params_sum
);
4254 ipa_node_params_sum
= NULL
;
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
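
/* Illustration (an assumed typical scenario, not code from this file): when
   an edge E1 carrying a constant jump function is duplicated into E2 and E3
   for two inline copies, the reference descriptors form a chain

     src_rdesc -> dst_rdesc(E2) -> dst_rdesc(E3) -> NULL

   linked through next_duplicate.  The lookup loop above walks this chain
   and picks the duplicate whose cs->caller belongs to the same tree of
   inline clones as the newly created edge.  */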
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  ipa_agg_replacement_value *agg = src_trans->agg_values,
			    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
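
/* The loop above uses the pointer-to-pointer idiom to copy a singly linked
   list while preserving its order.  A minimal standalone sketch of the same
   idiom, with hypothetical type and helper names:

     node **tail = &copy;
     for (node *p = orig; p; p = p->next)
       {
	 *tail = allocate_copy_of (p);
	 tail = &(*tail)->next;
       }

   Here the terminating NULL comes for free because each copied element's
   next pointer is copied from the original element.  */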
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
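
/* For illustration (hypothetical values): a chain recording constant 7 at
   offset 64 of parameter 1 and &foo at offset 0 of parameter 2 would print

     Aggregate replacements: 1[64]=7, 2[0]=&foo
*/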
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
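
/* Worked note on the header encoding above: a jump function of type T is
   streamed as the single uhwi T * 2 + FLAG, where FLAG is 1 only for
   IPA_JF_CONST values wrapped in an ADDR_EXPR.  The reader inverts this
   with T = val / 2 and FLAG = val & 1, and rebuilds the ADDR_EXPR around
   the streamed operand when FLAG is set.  */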
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
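
/* Layout note (derived directly from the code above): the single bitpack
   written for an indirect edge carries, in order, the one-bit flags
   polymorphic, agg_contents, member_ptr, by_ref, guaranteed_unmodified and
   vptr_changed; the reader below must unpack them in exactly this order.  */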
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
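
/* As with jump function types, the per-edge header packs two facts into one
   uhwi: ARGUMENT_COUNT * 2 + HAS_CONTEXTS.  For example (illustrative
   numbers), an edge with 3 arguments and polymorphic call contexts is
   written as 7, which the reader splits back into count = 7 / 2 = 3 and
   contexts_computed = 7 & 1.  A bare 0 marks an edge without summaries.  */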
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
					&len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  clone_info *cinfo = clone_info::get (node);

  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  for (v = aggval; v; v = v->next)
    {
      gcc_checking_assert (v->index >= 0);

      if ((unsigned) v->index < new_indices.length ())
	v->index = new_indices[v->index];
      else
	/* This can happen if we know about a constant passed by reference by
	   an argument which is never actually used for anything, let alone
	   loading that constant.  */
	v->index = -1;
    }
}
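
/* Illustrative example (hypothetical clone): if the clone dropped the
   second of three original parameters, get_updated_indices yields
   { 0, -1, 1 }, so a replacement recorded for original parameter 2 is
   remapped to index 1, while one for the dead parameter 1 becomes -1 and
   is subsequently ignored by the modification phase.  */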
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc) {}

  virtual edge before_dom_children (basic_block);
  bool cleanup_eh ()
    { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
	bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
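
/* Sketch of the transformation performed above (hypothetical dump): a load
   from an aggregate passed to this clone, e.g.

     x_1 = param_p->field;	becomes		x_1 = 42;

   when the replacement chain records the known constant 42 for that
   parameter index and offset, possibly wrapped in a NOP_EXPR or
   VIEW_CONVERT_EXPR when the recorded type differs from the load type.  */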
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
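
/* A minimal usage sketch (illustrative, assuming PARM is a PARM_DECL of the
   current function):

     tree value;
     widest_int mask;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       {
	 // Bits set in MASK are unknown; the rest are given by VALUE.
       }

   Note the polarity follows ipa_bits: a 1 bit in MASK means the
   corresponding bit of the parameter is not known to be constant.  */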
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
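
/* Worked example for the pointer branch above (illustrative numbers): with
   mask 0x...f8 and value 0x4, the lowest set mask bit gives
   align = tem & -tem = 0x8 and misalign = 0x4 & (0x8 - 1) = 4, i.e. the
   pointer is known to equal 4 modulo 8.  */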
bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
      return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &something_changed);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
/* Return true if OTHER describes same agg value.  */
bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
	 && operand_equal_p (value, other.value, 0);
}

/* Destructor also removing individual aggregate values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}


#include "gt-ipa-prop.h"