1 /* Interprocedural analyses.
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
59 #include "value-range-storage.h"
61 /* Function summary where the parameter infos are actually stored. */
62 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
64 function_summary
<ipcp_transformation
*> *ipcp_transformation_sum
= NULL
;
66 /* Edge summary for IPA-CP edge information. */
67 ipa_edge_args_sum_t
*ipa_edge_args_sum
;
69 /* Traits for a hash table for reusing ranges. */
71 struct ipa_vr_ggc_hash_traits
: public ggc_cache_remove
<ipa_vr
*>
73 typedef ipa_vr
*value_type
;
74 typedef const vrange
*compare_type
;
76 hash (const ipa_vr
*p
)
78 // This never get called, except in the verification code, as
79 // ipa_get_value_range() calculates the hash itself. This
80 // function is mostly here for completness' sake.
84 add_vrange (vr
, hstate
);
88 equal (const ipa_vr
*a
, const vrange
*b
)
90 return a
->equal_p (*b
);
92 static const bool empty_zero_p
= true;
94 mark_empty (ipa_vr
*&p
)
99 is_empty (const ipa_vr
*p
)
104 is_deleted (const ipa_vr
*p
)
106 return p
== reinterpret_cast<const ipa_vr
*> (1);
109 mark_deleted (ipa_vr
*&p
)
111 p
= reinterpret_cast<ipa_vr
*> (1);
115 /* Hash table for avoid repeated allocations of equal ranges. */
116 static GTY ((cache
)) hash_table
<ipa_vr_ggc_hash_traits
> *ipa_vr_hash_table
;
118 /* Holders of ipa cgraph hooks: */
119 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
121 /* Description of a reference to an IPA constant. */
122 struct ipa_cst_ref_desc
124 /* Edge that corresponds to the statement which took the reference. */
125 struct cgraph_edge
*cs
;
126 /* Linked list of duplicates created when call graph edges are cloned. */
127 struct ipa_cst_ref_desc
*next_duplicate
;
128 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
129 if out of control. */
133 /* Allocation pool for reference descriptions. */
135 static object_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
136 ("IPA-PROP ref descriptions");
144 ipa_vr::ipa_vr (const vrange
&r
)
145 : m_storage (ggc_alloc_vrange_storage (r
)),
/* Return true if this ipa_vr describes the same range as R: the stored
   type must be compatible with R's type and the underlying storage must
   hold an equal range.  R must not be undefined (asserted below).  */
151 ipa_vr::equal_p (const vrange
&r
) const
153 gcc_checking_assert (!r
.undefined_p ());
154 return (types_compatible_p (m_type
, r
.type ()) && m_storage
->equal_p (r
));
/* Fill R with the range kept in this ipa_vr's storage, interpreted with
   the saved type M_TYPE.  NOTE(review): presumably requires m_storage to
   be non-NULL (i.e. the range is known) — confirm against callers.  */
158 ipa_vr::get_vrange (Value_Range
&r
) const
161 m_storage
->get_vrange (r
, m_type
);
165 ipa_vr::set_unknown ()
168 ggc_free (m_storage
);
174 ipa_vr::streamer_read (lto_input_block
*ib
, data_in
*data_in
)
176 struct bitpack_d bp
= streamer_read_bitpack (ib
);
177 bool known
= bp_unpack_value (&bp
, 1);
181 streamer_read_value_range (ib
, data_in
, vr
);
182 if (!m_storage
|| !m_storage
->fits_p (vr
))
185 ggc_free (m_storage
);
186 m_storage
= ggc_alloc_vrange_storage (vr
);
188 m_storage
->set_vrange (vr
);
/* Stream this ipa_vr out to OB for LTO: first a one-bit flag saying
   whether a range is present (!!m_storage), then the range itself.
   NOTE(review): this extraction appears to have lost a guard between the
   bitpack write and the Value_Range block — presumably the range payload
   is only emitted when m_storage is non-NULL; confirm against the
   matching reader, ipa_vr::streamer_read.  */
199 ipa_vr::streamer_write (output_block
*ob
) const
201 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
/* One bit: whether a stored range follows.  */
202 bp_pack_value (&bp
, !!m_storage
, 1);
203 streamer_write_bitpack (&bp
);
/* Materialize the stored range into a temporary and stream it.  */
206 Value_Range
vr (m_type
);
207 m_storage
->get_vrange (vr
, m_type
);
208 streamer_write_vrange (ob
, vr
);
213 ipa_vr::dump (FILE *out
) const
217 Value_Range
vr (m_type
);
218 m_storage
->get_vrange (vr
, m_type
);
222 fprintf (out
, "NO RANGE");
225 // These stubs are because we use an ipa_vr in a hash_traits and
226 // hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
227 // picking up the gt_ggc_mx (T *) version.
/* PCH walker stub: forward the reference-to-pointer form to the plain
   pointer overload (see the comment above — hash-traits.h declares the
   T& variant, so this adapter is needed).  */
229 gt_pch_nx (ipa_vr
*&x
)
231 return gt_pch_nx ((ipa_vr
*) x
);
/* GC marker stub: forward the reference-to-pointer form to the plain
   pointer overload, for the same hash-traits.h reason as gt_pch_nx.  */
235 gt_ggc_mx (ipa_vr
*&x
)
237 return gt_ggc_mx ((ipa_vr
*) x
);
241 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
242 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
/* Return true if NODE's function-specific options forbid IPA-CP
   analysis: analysis is forbidden when the function is compiled without
   optimization or without -fipa-cp.  NOTE(review): FS_OPTS is fetched
   but the lines that used it (an early-out when no per-function options
   exist, per the fused-number gap 248-250) are missing from this
   extraction — confirm against upstream before relying on this text.  */
245 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
247 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
251 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
254 /* Return index of the formal whose tree is PTREE in function which corresponds
258 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
263 count
= vec_safe_length (descriptors
);
264 for (i
= 0; i
< count
; i
++)
265 if ((*descriptors
)[i
].decl_or_type
== ptree
)
271 /* Return index of the formal whose tree is PTREE in function which corresponds
/* Return the index of the formal parameter whose tree is PTREE in the
   function described by INFO, by delegating to the descriptor-vector
   variant ipa_get_param_decl_index_1.  */
275 ipa_get_param_decl_index (class ipa_node_params
*info
, tree ptree
)
277 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
280 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
284 ipa_populate_param_decls (struct cgraph_node
*node
,
285 vec
<ipa_param_descriptor
, va_gc
> &descriptors
)
293 gcc_assert (gimple_has_body_p (fndecl
));
294 fnargs
= DECL_ARGUMENTS (fndecl
);
296 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
298 descriptors
[param_num
].decl_or_type
= parm
;
299 unsigned int cost
= estimate_move_cost (TREE_TYPE (parm
), true);
300 descriptors
[param_num
].move_cost
= cost
;
301 /* Watch overflow, move_cost is a bitfield. */
302 gcc_checking_assert (cost
== descriptors
[param_num
].move_cost
);
307 /* Return how many formal parameters FNDECL has. */
310 count_formal_params (tree fndecl
)
314 gcc_assert (gimple_has_body_p (fndecl
));
316 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
322 /* Return the declaration of Ith formal parameter of the function corresponding
323 to INFO. Note there is no setter function as this array is built just once
324 using ipa_initialize_node_params. */
327 ipa_dump_param (FILE *file
, class ipa_node_params
*info
, int i
)
329 fprintf (file
, "param #%i", i
);
330 if ((*info
->descriptors
)[i
].decl_or_type
)
333 print_generic_expr (file
, (*info
->descriptors
)[i
].decl_or_type
);
337 /* If necessary, allocate vector of parameter descriptors in info of NODE.
338 Return true if they were allocated, false if not. */
341 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
343 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
345 if (!info
->descriptors
&& param_count
)
347 vec_safe_grow_cleared (info
->descriptors
, param_count
, true);
354 /* Initialize the ipa_node_params structure associated with NODE by counting
355 the function parameters, creating the descriptors and populating their
359 ipa_initialize_node_params (struct cgraph_node
*node
)
361 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
363 if (!info
->descriptors
364 && ipa_alloc_node_params (node
, count_formal_params (node
->decl
)))
365 ipa_populate_param_decls (node
, *info
->descriptors
);
368 /* Print the jump functions associated with call graph edge CS to file F. */
371 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
373 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
374 int count
= ipa_get_cs_argument_count (args
);
376 for (int i
= 0; i
< count
; i
++)
378 struct ipa_jump_func
*jump_func
;
379 enum jump_func_type type
;
381 jump_func
= ipa_get_ith_jump_func (args
, i
);
382 type
= jump_func
->type
;
384 fprintf (f
, " param %d: ", i
);
385 if (type
== IPA_JF_UNKNOWN
)
386 fprintf (f
, "UNKNOWN\n");
387 else if (type
== IPA_JF_CONST
)
389 tree val
= jump_func
->value
.constant
.value
;
390 fprintf (f
, "CONST: ");
391 print_generic_expr (f
, val
);
392 if (TREE_CODE (val
) == ADDR_EXPR
393 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
396 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
400 else if (type
== IPA_JF_PASS_THROUGH
)
402 fprintf (f
, "PASS THROUGH: ");
403 fprintf (f
, "%d, op %s",
404 jump_func
->value
.pass_through
.formal_id
,
405 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
406 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
409 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
411 if (jump_func
->value
.pass_through
.agg_preserved
)
412 fprintf (f
, ", agg_preserved");
413 if (jump_func
->value
.pass_through
.refdesc_decremented
)
414 fprintf (f
, ", refdesc_decremented");
417 else if (type
== IPA_JF_ANCESTOR
)
419 fprintf (f
, "ANCESTOR: ");
420 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
421 jump_func
->value
.ancestor
.formal_id
,
422 jump_func
->value
.ancestor
.offset
);
423 if (jump_func
->value
.ancestor
.agg_preserved
)
424 fprintf (f
, ", agg_preserved");
425 if (jump_func
->value
.ancestor
.keep_null
)
426 fprintf (f
, ", keep_null");
430 if (jump_func
->agg
.items
)
432 struct ipa_agg_jf_item
*item
;
435 fprintf (f
, " Aggregate passed by %s:\n",
436 jump_func
->agg
.by_ref
? "reference" : "value");
437 FOR_EACH_VEC_ELT (*jump_func
->agg
.items
, j
, item
)
439 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
441 fprintf (f
, "type: ");
442 print_generic_expr (f
, item
->type
);
444 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
445 fprintf (f
, "PASS THROUGH: %d,",
446 item
->value
.pass_through
.formal_id
);
447 else if (item
->jftype
== IPA_JF_LOAD_AGG
)
449 fprintf (f
, "LOAD AGG: %d",
450 item
->value
.pass_through
.formal_id
);
451 fprintf (f
, " [offset: " HOST_WIDE_INT_PRINT_DEC
", by %s],",
452 item
->value
.load_agg
.offset
,
453 item
->value
.load_agg
.by_ref
? "reference"
457 if (item
->jftype
== IPA_JF_PASS_THROUGH
458 || item
->jftype
== IPA_JF_LOAD_AGG
)
460 fprintf (f
, " op %s",
461 get_tree_code_name (item
->value
.pass_through
.operation
));
462 if (item
->value
.pass_through
.operation
!= NOP_EXPR
)
465 print_generic_expr (f
, item
->value
.pass_through
.operand
);
468 else if (item
->jftype
== IPA_JF_CONST
)
470 fprintf (f
, "CONST: ");
471 print_generic_expr (f
, item
->value
.constant
);
473 else if (item
->jftype
== IPA_JF_UNKNOWN
)
474 fprintf (f
, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC
" bits",
475 tree_to_uhwi (TYPE_SIZE (item
->type
)));
480 class ipa_polymorphic_call_context
*ctx
481 = ipa_get_ith_polymorhic_call_context (args
, i
);
482 if (ctx
&& !ctx
->useless_p ())
484 fprintf (f
, " Context: ");
485 ctx
->dump (dump_file
);
490 jump_func
->m_vr
->dump (f
);
494 fprintf (f
, " Unknown VR\n");
499 /* Print the jump functions of all arguments on all call graph edges going from
503 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
505 struct cgraph_edge
*cs
;
507 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
508 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
511 fprintf (f
, " callsite %s -> %s : \n",
513 cs
->callee
->dump_name ());
514 if (!ipa_edge_args_info_available_for_edge_p (cs
))
515 fprintf (f
, " no arg info\n");
517 ipa_print_node_jump_functions_for_edge (f
, cs
);
520 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
522 class cgraph_indirect_call_info
*ii
;
524 ii
= cs
->indirect_info
;
525 if (ii
->agg_contents
)
526 fprintf (f
, " indirect %s callsite, calling param %i, "
527 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
528 ii
->member_ptr
? "member ptr" : "aggregate",
529 ii
->param_index
, ii
->offset
,
530 ii
->by_ref
? "by reference" : "by_value");
532 fprintf (f
, " indirect %s callsite, calling param %i, "
533 "offset " HOST_WIDE_INT_PRINT_DEC
,
534 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
539 fprintf (f
, ", for stmt ");
540 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
545 ii
->context
.dump (f
);
546 if (!ipa_edge_args_info_available_for_edge_p (cs
))
547 fprintf (f
, " no arg info\n");
549 ipa_print_node_jump_functions_for_edge (f
, cs
);
553 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
556 ipa_print_all_jump_functions (FILE *f
)
558 struct cgraph_node
*node
;
560 fprintf (f
, "\nJump functions:\n");
561 FOR_EACH_FUNCTION (node
)
563 ipa_print_node_jump_functions (f
, node
);
567 /* Set jfunc to be a know-really nothing jump function. */
/* Mark JFUNC as a know-nothing jump function by setting its type tag to
   IPA_JF_UNKNOWN; no value payload is filled in.  */
570 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
572 jfunc
->type
= IPA_JF_UNKNOWN
;
575 /* Set JFUNC to be a copy of another jmp (to be used by jump function
576 combination code). The two functions will share their rdesc. */
579 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
580 struct ipa_jump_func
*src
)
583 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
584 dst
->type
= IPA_JF_CONST
;
585 dst
->value
.constant
= src
->value
.constant
;
588 /* Set JFUNC to be a constant jmp function. */
591 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
592 struct cgraph_edge
*cs
)
594 jfunc
->type
= IPA_JF_CONST
;
595 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
597 if (TREE_CODE (constant
) == ADDR_EXPR
598 && (TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
599 || (VAR_P (TREE_OPERAND (constant
, 0))
600 && TREE_STATIC (TREE_OPERAND (constant
, 0)))))
602 struct ipa_cst_ref_desc
*rdesc
;
604 rdesc
= ipa_refdesc_pool
.allocate ();
606 rdesc
->next_duplicate
= NULL
;
608 jfunc
->value
.constant
.rdesc
= rdesc
;
611 jfunc
->value
.constant
.rdesc
= NULL
;
614 /* Set JFUNC to be a simple pass-through jump function. */
616 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
619 jfunc
->type
= IPA_JF_PASS_THROUGH
;
620 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
621 jfunc
->value
.pass_through
.formal_id
= formal_id
;
622 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
623 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
624 jfunc
->value
.pass_through
.refdesc_decremented
= false;
627 /* Set JFUNC to be an unary pass through jump function. */
630 ipa_set_jf_unary_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
631 enum tree_code operation
)
633 jfunc
->type
= IPA_JF_PASS_THROUGH
;
634 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
635 jfunc
->value
.pass_through
.formal_id
= formal_id
;
636 jfunc
->value
.pass_through
.operation
= operation
;
637 jfunc
->value
.pass_through
.agg_preserved
= false;
638 jfunc
->value
.pass_through
.refdesc_decremented
= false;
640 /* Set JFUNC to be an arithmetic pass through jump function. */
643 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
644 tree operand
, enum tree_code operation
)
646 jfunc
->type
= IPA_JF_PASS_THROUGH
;
647 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
648 jfunc
->value
.pass_through
.formal_id
= formal_id
;
649 jfunc
->value
.pass_through
.operation
= operation
;
650 jfunc
->value
.pass_through
.agg_preserved
= false;
651 jfunc
->value
.pass_through
.refdesc_decremented
= false;
654 /* Set JFUNC to be an ancestor jump function. */
657 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
658 int formal_id
, bool agg_preserved
, bool keep_null
)
660 jfunc
->type
= IPA_JF_ANCESTOR
;
661 jfunc
->value
.ancestor
.formal_id
= formal_id
;
662 jfunc
->value
.ancestor
.offset
= offset
;
663 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
664 jfunc
->value
.ancestor
.keep_null
= keep_null
;
667 /* Get IPA BB information about the given BB. FBI is the context of analyzis
668 of this function body. */
/* Return a pointer to the per-BB IPA info for BB, looked up by the BB's
   index in FBI's bb_infos vector.  FBI must be non-NULL (asserted).  */
670 static struct ipa_bb_info
*
671 ipa_get_bb_info (struct ipa_func_body_info
*fbi
, basic_block bb
)
673 gcc_checking_assert (fbi
);
674 return &fbi
->bb_infos
[bb
->index
];
677 /* Structure to be passed in between detect_type_change and
678 check_stmt_for_type_change. */
680 struct prop_type_change_info
682 /* Offset into the object where there is the virtual method pointer we are
684 HOST_WIDE_INT offset
;
685 /* The declaration or SSA_NAME pointer of the base that we are checking for
688 /* Set to true if dynamic type change has been detected. */
689 bool type_maybe_changed
;
692 /* Return true if STMT can modify a virtual method table pointer.
694 This function makes special assumptions about both constructors and
695 destructors which are all the functions that are allowed to alter the VMT
696 pointers. It assumes that destructors begin with assignment into all VMT
697 pointers and that constructors essentially look in the following way:
699 1) The very first thing they do is that they call constructors of ancestor
700 sub-objects that have them.
702 2) Then VMT pointers of this and all its ancestors is set to new values
703 corresponding to the type corresponding to the constructor.
705 3) Only afterwards, other stuff such as constructor of member sub-objects
706 and the code written by the user is run. Only this may include calling
707 virtual functions, directly or indirectly.
709 There is no way to call a constructor of an ancestor sub-object in any
712 This means that we do not have to care whether constructors get the correct
713 type information because they will always change it (in fact, if we define
714 the type to be given by the VMT pointer, it is undefined).
716 The most important fact to derive from the above is that if, for some
717 statement in the section 3, we try to detect whether the dynamic type has
718 changed, we can safely ignore all calls as we examine the function body
719 backwards until we reach statements in section 2 because these calls cannot
720 be ancestor constructors or destructors (if the input is not bogus) and so
721 do not change the dynamic type (this holds true only for automatically
722 allocated objects but at the moment we devirtualize only these). We then
723 must detect that statements in section 2 change the dynamic type and can try
724 to derive the new type. That is enough and we can stop, we will never see
725 the calls into constructors of sub-objects in this code. Therefore we can
726 safely ignore all call statements that we traverse.
730 stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
732 if (is_gimple_call (stmt
))
734 if (gimple_clobber_p (stmt
))
736 else if (is_gimple_assign (stmt
))
738 tree lhs
= gimple_assign_lhs (stmt
);
740 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
742 if (flag_strict_aliasing
743 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
746 if (TREE_CODE (lhs
) == COMPONENT_REF
747 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
749 /* In the future we might want to use get_ref_base_and_extent to find
750 if there is a field corresponding to the offset and if so, proceed
751 almost like if it was a component ref. */
757 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
758 to check whether a particular statement may modify the virtual table
759 pointerIt stores its result into DATA, which points to a
760 prop_type_change_info structure. */
763 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
765 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
766 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
768 if (stmt_may_be_vtbl_ptr_store (stmt
))
770 tci
->type_maybe_changed
= true;
777 /* See if ARG is PARAM_DECl describing instance passed by pointer
778 or reference in FUNCTION. Return false if the dynamic type may change
779 in between beggining of the function until CALL is invoked.
781 Generally functions are not allowed to change type of such instances,
782 but they call destructors. We assume that methods cannot destroy the THIS
783 pointer. Also as a special cases, constructor and destructors may change
784 type of the THIS pointer. */
787 param_type_may_change_p (tree function
, tree arg
, gimple
*call
)
789 /* Pure functions cannot do any changes on the dynamic type;
790 that require writting to memory. */
791 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
793 /* We need to check if we are within inlined consturctor
794 or destructor (ideally we would have way to check that the
795 inline cdtor is actually working on ARG, but we don't have
796 easy tie on this, so punt on all non-pure cdtors.
797 We may also record the types of cdtors and once we know type
798 of the instance match them.
800 Also code unification optimizations may merge calls from
801 different blocks making return values unreliable. So
802 do nothing during late optimization. */
803 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
805 if (TREE_CODE (arg
) == SSA_NAME
806 && SSA_NAME_IS_DEFAULT_DEF (arg
)
807 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
809 /* Normal (non-THIS) argument. */
810 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
811 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
812 /* THIS pointer of an method - here we want to watch constructors
813 and destructors as those definitely may change the dynamic
815 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
816 && !DECL_CXX_CONSTRUCTOR_P (function
)
817 && !DECL_CXX_DESTRUCTOR_P (function
)
818 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
820 /* Walk the inline stack and watch out for ctors/dtors. */
821 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
822 block
= BLOCK_SUPERCONTEXT (block
))
823 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
831 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
832 callsite CALL) by looking for assignments to its virtual table pointer. If
833 it is, return true. ARG is the object itself (not a pointer
834 to it, unless dereferenced). BASE is the base of the memory access as
835 returned by get_ref_base_and_extent, as is the offset.
837 This is helper function for detect_type_change and detect_type_change_ssa
838 that does the heavy work which is usually unnecesary. */
841 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
842 tree base
, tree comp_type
, gcall
*call
,
843 HOST_WIDE_INT offset
)
845 struct prop_type_change_info tci
;
848 gcc_checking_assert (DECL_P (arg
)
849 || TREE_CODE (arg
) == MEM_REF
850 || handled_component_p (arg
));
852 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
854 /* Const calls cannot call virtual methods through VMT and so type changes do
856 if (!flag_devirtualize
|| !gimple_vuse (call
)
857 /* Be sure expected_type is polymorphic. */
859 || TREE_CODE (comp_type
) != RECORD_TYPE
860 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
861 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
864 if (fbi
->aa_walk_budget
== 0)
867 ao_ref_init (&ao
, arg
);
870 ao
.size
= POINTER_SIZE
;
871 ao
.max_size
= ao
.size
;
874 tci
.object
= get_base_address (arg
);
875 tci
.type_maybe_changed
= false;
878 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
879 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
);
881 fbi
->aa_walk_budget
-= walked
;
883 fbi
->aa_walk_budget
= 0;
885 if (walked
>= 0 && !tci
.type_maybe_changed
)
891 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
892 If it is, return true. ARG is the object itself (not a pointer
893 to it, unless dereferenced). BASE is the base of the memory access as
894 returned by get_ref_base_and_extent, as is the offset. */
897 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
898 tree comp_type
, gcall
*call
,
899 HOST_WIDE_INT offset
)
901 if (!flag_devirtualize
)
904 if (TREE_CODE (base
) == MEM_REF
905 && !param_type_may_change_p (current_function_decl
,
906 TREE_OPERAND (base
, 0),
909 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
913 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
914 SSA name (its dereference will become the base and the offset is assumed to
918 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
921 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
922 if (!flag_devirtualize
923 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
926 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
929 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
930 build_int_cst (ptr_type_node
, 0));
932 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
936 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
937 boolean variable pointed to by DATA. */
940 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
943 bool *b
= (bool *) data
;
948 /* Find the nearest valid aa status for parameter specified by INDEX that
951 static struct ipa_param_aa_status
*
952 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
957 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
960 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
961 if (!bi
->param_aa_statuses
.is_empty ()
962 && bi
->param_aa_statuses
[index
].valid
)
963 return &bi
->param_aa_statuses
[index
];
967 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
968 structures and/or intialize the result with a dominating description as
971 static struct ipa_param_aa_status
*
972 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
975 gcc_checking_assert (fbi
);
976 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
977 if (bi
->param_aa_statuses
.is_empty ())
978 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
, true);
979 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
982 gcc_checking_assert (!paa
->parm_modified
983 && !paa
->ref_modified
984 && !paa
->pt_modified
);
985 struct ipa_param_aa_status
*dom_paa
;
986 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
996 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
997 a value known not to be modified in this function before reaching the
998 statement STMT. FBI holds information about the function we have so far
999 gathered but do not survive the summary building stage. */
1002 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
1003 gimple
*stmt
, tree parm_load
)
1005 struct ipa_param_aa_status
*paa
;
1006 bool modified
= false;
1009 tree base
= get_base_address (parm_load
);
1010 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
1011 if (TREE_READONLY (base
))
1014 gcc_checking_assert (fbi
);
1015 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1016 if (paa
->parm_modified
|| fbi
->aa_walk_budget
== 0)
1019 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
1020 ao_ref_init (&refd
, parm_load
);
1021 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1022 &modified
, NULL
, NULL
,
1023 fbi
->aa_walk_budget
);
1027 fbi
->aa_walk_budget
= 0;
1030 fbi
->aa_walk_budget
-= walked
;
1031 if (paa
&& modified
)
1032 paa
->parm_modified
= true;
1036 /* If STMT is an assignment that loads a value from an parameter declaration,
1037 return the index of the parameter in ipa_node_params which has not been
1038 modified. Otherwise return -1. */
1041 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
1042 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1048 if (!gimple_assign_single_p (stmt
))
1051 op1
= gimple_assign_rhs1 (stmt
);
1052 if (TREE_CODE (op1
) != PARM_DECL
)
1055 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
1057 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
1063 /* Return true if memory reference REF (which must be a load through parameter
1064 with INDEX) loads data that are known to be unmodified in this function
1065 before reaching statement STMT. */
1068 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
1069 int index
, gimple
*stmt
, tree ref
)
1071 struct ipa_param_aa_status
*paa
;
1072 bool modified
= false;
1075 gcc_checking_assert (fbi
);
1076 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1077 if (paa
->ref_modified
|| fbi
->aa_walk_budget
== 0)
1080 gcc_checking_assert (gimple_vuse (stmt
));
1081 ao_ref_init (&refd
, ref
);
1082 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1083 &modified
, NULL
, NULL
,
1084 fbi
->aa_walk_budget
);
1088 fbi
->aa_walk_budget
= 0;
1091 fbi
->aa_walk_budget
-= walked
;
1093 paa
->ref_modified
= true;
1097 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1098 is known to be unmodified in this function before reaching call statement
1099 CALL into which it is passed. FBI describes the function body. */
1102 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1103 gimple
*call
, tree parm
)
1105 bool modified
= false;
1108 /* It's unnecessary to calculate anything about memory contnets for a const
1109 function because it is not goin to use it. But do not cache the result
1110 either. Also, no such calculations for non-pointers. */
1111 if (!gimple_vuse (call
)
1112 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1115 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1118 if (paa
->pt_modified
|| fbi
->aa_walk_budget
== 0)
1121 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1122 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1123 &modified
, NULL
, NULL
,
1124 fbi
->aa_walk_budget
);
1127 fbi
->aa_walk_budget
= 0;
1131 fbi
->aa_walk_budget
-= walked
;
1133 paa
->pt_modified
= true;
1137 /* Return true if we can prove that OP is a memory reference loading
1138 data from an aggregate passed as a parameter.
1140 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1141 false if it cannot prove that the value has not been modified before the
1142 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1143 if it cannot prove the value has not been modified, in that case it will
1144 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1146 INFO and PARMS_AINFO describe parameters of the current function (but the
1147 latter can be NULL), STMT is the load statement. If function returns true,
1148 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1149 within the aggregate and whether it is a load from a value passed by
1150 reference respectively.
1152 Return false if the offset divided by BITS_PER_UNIT would not fit into an
1156 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1157 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1158 gimple
*stmt
, tree op
, int *index_p
,
1159 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1160 bool *by_ref_p
, bool *guaranteed_unmodified
)
1165 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1168 || (*offset_p
/ BITS_PER_UNIT
) > UINT_MAX
)
1171 /* We can not propagate across volatile loads. */
1172 if (TREE_THIS_VOLATILE (op
))
1177 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1179 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1185 if (guaranteed_unmodified
)
1186 *guaranteed_unmodified
= true;
1192 if (TREE_CODE (base
) != MEM_REF
1193 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1194 || !integer_zerop (TREE_OPERAND (base
, 1)))
1197 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1199 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1200 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1204 /* This branch catches situations where a pointer parameter is not a
1205 gimple register, for example:
1207 void hip7(S*) (struct S * p)
1209 void (*<T2e4>) (struct S *) D.1867;
1214 D.1867_2 = p.1_1->f;
1219 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1220 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1225 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1226 if (!data_preserved
&& !guaranteed_unmodified
)
1233 if (guaranteed_unmodified
)
1234 *guaranteed_unmodified
= data_preserved
;
1240 /* If STMT is an assignment that loads a value from a parameter declaration,
1241 or from an aggregate passed as the parameter either by value or reference,
1242 return the index of the parameter in ipa_node_params. Otherwise return -1.
1244 FBI holds gathered information about the function. INFO describes
1245 parameters of the function, STMT is the assignment statement. If it is a
1246 memory load from an aggregate, *OFFSET_P is filled with offset within the
1247 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1251 load_from_unmodified_param_or_agg (struct ipa_func_body_info
*fbi
,
1252 class ipa_node_params
*info
,
1254 HOST_WIDE_INT
*offset_p
,
1257 int index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1260 /* Load value from a parameter declaration. */
1267 if (!gimple_assign_load_p (stmt
))
1270 tree rhs
= gimple_assign_rhs1 (stmt
);
1272 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1273 for (tree t
= rhs
; handled_component_p (t
); t
= TREE_OPERAND (t
, 0))
1274 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
1277 /* Skip memory reference containing bit-field. */
1278 if (TREE_CODE (rhs
) == BIT_FIELD_REF
1279 || contains_bitfld_component_ref_p (rhs
))
1282 if (!ipa_load_from_parm_agg (fbi
, info
->descriptors
, stmt
, rhs
, &index
,
1283 offset_p
, &size
, by_ref_p
))
1286 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs
))),
1290 tree param_type
= ipa_get_type (info
, index
);
1292 if (!param_type
|| !AGGREGATE_TYPE_P (param_type
))
1295 else if (TREE_THIS_VOLATILE (rhs
))
1301 /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1302 to find original pointer. Initialize RET to the pointer which results from
1304 If offset is known return true and initialize OFFSET_RET. */
1307 unadjusted_ptr_and_unit_offset (tree op
, tree
*ret
, poly_int64
*offset_ret
)
1309 poly_int64 offset
= 0;
1310 bool offset_known
= true;
1313 for (i
= 0; i
< param_ipa_jump_function_lookups
; i
++)
1315 if (TREE_CODE (op
) == ADDR_EXPR
)
1317 poly_int64 extra_offset
= 0;
1318 tree base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
, 0),
1322 base
= get_base_address (TREE_OPERAND (op
, 0));
1323 if (TREE_CODE (base
) != MEM_REF
)
1325 offset_known
= false;
1329 if (TREE_CODE (base
) != MEM_REF
)
1331 offset
+= extra_offset
;
1333 op
= TREE_OPERAND (base
, 0);
1334 if (mem_ref_offset (base
).to_shwi (&extra_offset
))
1335 offset
+= extra_offset
;
1337 offset_known
= false;
1339 else if (TREE_CODE (op
) == SSA_NAME
1340 && !SSA_NAME_IS_DEFAULT_DEF (op
))
1342 gimple
*pstmt
= SSA_NAME_DEF_STMT (op
);
1344 if (gimple_assign_single_p (pstmt
))
1345 op
= gimple_assign_rhs1 (pstmt
);
1346 else if (is_gimple_assign (pstmt
)
1347 && gimple_assign_rhs_code (pstmt
) == POINTER_PLUS_EXPR
)
1349 poly_int64 extra_offset
= 0;
1350 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt
),
1352 offset
+= extra_offset
;
1354 offset_known
= false;
1355 op
= gimple_assign_rhs1 (pstmt
);
1364 *offset_ret
= offset
;
1365 return offset_known
;
1368 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1369 of an assignment statement STMT, try to determine whether we are actually
1370 handling any of the following cases and construct an appropriate jump
1371 function into JFUNC if so:
1373 1) The passed value is loaded from a formal parameter which is not a gimple
1374 register (most probably because it is addressable, the value has to be
1375 scalar) and we can guarantee the value has not changed. This case can
1376 therefore be described by a simple pass-through jump function. For example:
1385 2) The passed value can be described by a simple arithmetic pass-through
1392 D.2064_4 = a.1(D) + 4;
1395 This case can also occur in combination of the previous one, e.g.:
1403 D.2064_4 = a.0_3 + 4;
1406 3) The passed value is an address of an object within another one (which
1407 also passed by reference). Such situations are described by an ancestor
1408 jump function and describe situations such as:
1410 B::foo() (struct B * const this)
1414 D.1845_2 = &this_1(D)->D.1748;
1417 INFO is the structure describing individual parameters access different
1418 stages of IPA optimizations. PARMS_AINFO contains the information that is
1419 only needed for intraprocedural analysis. */
1422 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1423 class ipa_node_params
*info
,
1424 struct ipa_jump_func
*jfunc
,
1425 gcall
*call
, gimple
*stmt
, tree name
,
1428 HOST_WIDE_INT offset
, size
;
1429 tree op1
, tc_ssa
, base
, ssa
;
1433 op1
= gimple_assign_rhs1 (stmt
);
1435 if (TREE_CODE (op1
) == SSA_NAME
)
1437 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1438 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1440 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1441 SSA_NAME_DEF_STMT (op1
));
1446 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1447 tc_ssa
= gimple_assign_lhs (stmt
);
1452 switch (gimple_assign_rhs_class (stmt
))
1454 case GIMPLE_BINARY_RHS
:
1456 tree op2
= gimple_assign_rhs2 (stmt
);
1457 if (!is_gimple_ip_invariant (op2
)
1458 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1460 && !useless_type_conversion_p (TREE_TYPE (name
),
1464 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1465 gimple_assign_rhs_code (stmt
));
1468 case GIMPLE_SINGLE_RHS
:
1470 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1472 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1475 case GIMPLE_UNARY_RHS
:
1476 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1477 ipa_set_jf_unary_pass_through (jfunc
, index
,
1478 gimple_assign_rhs_code (stmt
));
1484 if (TREE_CODE (op1
) != ADDR_EXPR
)
1486 op1
= TREE_OPERAND (op1
, 0);
1487 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1488 offset_int mem_offset
;
1490 || TREE_CODE (base
) != MEM_REF
1491 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1493 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1494 ssa
= TREE_OPERAND (base
, 0);
1495 if (TREE_CODE (ssa
) != SSA_NAME
1496 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1500 /* Dynamic types are changed in constructors and destructors. */
1501 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1502 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1503 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1504 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
),
1508 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1511 iftmp.1_3 = &obj_2(D)->D.1762;
1513 The base of the MEM_REF must be a default definition SSA NAME of a
1514 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1515 whole MEM_REF expression is returned and the offset calculated from any
1516 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1517 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1520 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1523 tree expr
, parm
, obj
;
1526 if (!gimple_assign_single_p (assign
))
1528 expr
= gimple_assign_rhs1 (assign
);
1530 if (TREE_CODE (expr
) != ADDR_EXPR
)
1532 expr
= TREE_OPERAND (expr
, 0);
1534 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1536 offset_int mem_offset
;
1538 || TREE_CODE (expr
) != MEM_REF
1539 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1541 parm
= TREE_OPERAND (expr
, 0);
1542 if (TREE_CODE (parm
) != SSA_NAME
1543 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1544 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1547 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1553 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1554 statement PHI, try to find out whether NAME is in fact a
1555 multiple-inheritance typecast from a descendant into an ancestor of a formal
1556 parameter and thus can be described by an ancestor jump function and if so,
1557 write the appropriate function into JFUNC.
1559 Essentially we want to match the following pattern:
1567 iftmp.1_3 = &obj_2(D)->D.1762;
1570 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1571 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1575 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1576 class ipa_node_params
*info
,
1577 struct ipa_jump_func
*jfunc
,
1578 gcall
*call
, gphi
*phi
)
1580 HOST_WIDE_INT offset
;
1582 basic_block phi_bb
, assign_bb
, cond_bb
;
1583 tree tmp
, parm
, expr
, obj
;
1586 if (gimple_phi_num_args (phi
) != 2)
1589 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1590 tmp
= PHI_ARG_DEF (phi
, 0);
1591 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1592 tmp
= PHI_ARG_DEF (phi
, 1);
1595 if (TREE_CODE (tmp
) != SSA_NAME
1596 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1597 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1598 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1601 assign
= SSA_NAME_DEF_STMT (tmp
);
1602 assign_bb
= gimple_bb (assign
);
1603 if (!single_pred_p (assign_bb
))
1605 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1608 parm
= TREE_OPERAND (expr
, 0);
1609 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1613 cond_bb
= single_pred (assign_bb
);
1614 gcond
*cond
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (cond_bb
));
1616 || gimple_cond_code (cond
) != NE_EXPR
1617 || gimple_cond_lhs (cond
) != parm
1618 || !integer_zerop (gimple_cond_rhs (cond
)))
1621 phi_bb
= gimple_bb (phi
);
1622 for (i
= 0; i
< 2; i
++)
1624 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1625 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1629 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1630 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
),
1634 /* Inspect the given TYPE and return true iff it has the same structure (the
1635 same number of fields of the same types) as a C++ member pointer. If
1636 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1637 corresponding fields there. */
1640 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1644 if (TREE_CODE (type
) != RECORD_TYPE
)
1647 fld
= TYPE_FIELDS (type
);
1648 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1649 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1650 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1656 fld
= DECL_CHAIN (fld
);
1657 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1658 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1663 if (DECL_CHAIN (fld
))
1669 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1670 return the rhs of its defining statement, and this statement is stored in
1671 *RHS_STMT. Otherwise return RHS as it is. */
1674 get_ssa_def_if_simple_copy (tree rhs
, gimple
**rhs_stmt
)
1676 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1678 gimple
*def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1680 if (gimple_assign_single_p (def_stmt
))
1681 rhs
= gimple_assign_rhs1 (def_stmt
);
1684 *rhs_stmt
= def_stmt
;
1689 /* Simple linked list, describing contents of an aggregate before call. */
1691 struct ipa_known_agg_contents_list
1693 /* Offset and size of the described part of the aggregate. */
1694 HOST_WIDE_INT offset
, size
;
1696 /* Type of the described part of the aggregate. */
1699 /* Known constant value or jump function data describing contents. */
1700 struct ipa_load_agg_data value
;
1702 /* Pointer to the next structure in the list. */
1703 struct ipa_known_agg_contents_list
*next
;
1706 /* Add an aggregate content item into a linked list of
1707 ipa_known_agg_contents_list structure, in which all elements
1708 are sorted ascendingly by offset. */
1711 add_to_agg_contents_list (struct ipa_known_agg_contents_list
**plist
,
1712 struct ipa_known_agg_contents_list
*item
)
1714 struct ipa_known_agg_contents_list
*list
= *plist
;
1716 for (; list
; list
= list
->next
)
1718 if (list
->offset
>= item
->offset
)
1721 plist
= &list
->next
;
1728 /* Check whether a given aggregate content is clobbered by certain element in
1729 a linked list of ipa_known_agg_contents_list. */
1732 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list
*list
,
1733 struct ipa_known_agg_contents_list
*item
)
1735 for (; list
; list
= list
->next
)
1737 if (list
->offset
>= item
->offset
)
1738 return list
->offset
< item
->offset
+ item
->size
;
1740 if (list
->offset
+ list
->size
> item
->offset
)
1747 /* Build aggregate jump function from LIST, assuming there are exactly
1748 VALUE_COUNT entries there and that offset of the passed argument
1749 is ARG_OFFSET and store it into JFUNC. */
1752 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1753 int value_count
, HOST_WIDE_INT arg_offset
,
1754 struct ipa_jump_func
*jfunc
)
1756 vec_safe_reserve (jfunc
->agg
.items
, value_count
, true);
1757 for (; list
; list
= list
->next
)
1759 struct ipa_agg_jf_item item
;
1760 tree operand
= list
->value
.pass_through
.operand
;
1762 if (list
->value
.pass_through
.formal_id
>= 0)
1764 /* Content value is derived from some formal paramerter. */
1765 if (list
->value
.offset
>= 0)
1766 item
.jftype
= IPA_JF_LOAD_AGG
;
1768 item
.jftype
= IPA_JF_PASS_THROUGH
;
1770 item
.value
.load_agg
= list
->value
;
1772 item
.value
.pass_through
.operand
1773 = unshare_expr_without_location (operand
);
1777 /* Content value is known constant. */
1778 item
.jftype
= IPA_JF_CONST
;
1779 item
.value
.constant
= unshare_expr_without_location (operand
);
1784 item
.type
= list
->type
;
1785 gcc_assert (tree_to_shwi (TYPE_SIZE (list
->type
)) == list
->size
);
1787 item
.offset
= list
->offset
- arg_offset
;
1788 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1790 jfunc
->agg
.items
->quick_push (item
);
1794 /* Given an assignment statement STMT, try to collect information into
1795 AGG_VALUE that will be used to construct jump function for RHS of the
1796 assignment, from which content value of an aggregate part comes.
1798 Besides constant and simple pass-through jump functions, also try to
1799 identify whether it matches the following pattern that can be described by
1800 a load-value-from-aggregate jump function, which is a derivative of simple
1801 pass-through jump function.
1807 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1811 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1812 constant, simple pass-through and load-vale-from-aggregate. If value
1813 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1814 set to -1. For simple pass-through and load-value-from-aggregate, field
1815 FORMAL_ID specifies the related formal parameter index, and field
1816 OFFSET can be used to distinguish them, -1 means simple pass-through,
1817 otherwise means load-value-from-aggregate. */
1820 analyze_agg_content_value (struct ipa_func_body_info
*fbi
,
1821 struct ipa_load_agg_data
*agg_value
,
1824 tree lhs
= gimple_assign_lhs (stmt
);
1825 tree rhs1
= gimple_assign_rhs1 (stmt
);
1826 enum tree_code code
;
1829 /* Initialize jump function data for the aggregate part. */
1830 memset (agg_value
, 0, sizeof (*agg_value
));
1831 agg_value
->pass_through
.operation
= NOP_EXPR
;
1832 agg_value
->pass_through
.formal_id
= -1;
1833 agg_value
->offset
= -1;
1835 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
)) /* TODO: Support aggregate type. */
1836 || TREE_THIS_VOLATILE (lhs
)
1837 || TREE_CODE (lhs
) == BIT_FIELD_REF
1838 || contains_bitfld_component_ref_p (lhs
))
1841 /* Skip SSA copies. */
1842 while (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
1844 if (TREE_CODE (rhs1
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1847 stmt
= SSA_NAME_DEF_STMT (rhs1
);
1848 if (!is_gimple_assign (stmt
))
1851 rhs1
= gimple_assign_rhs1 (stmt
);
1854 if (gphi
*phi
= dyn_cast
<gphi
*> (stmt
))
1856 /* Also special case like the following (a is a formal parameter):
1858 _12 = *a_11(D).dim[0].stride;
1860 # iftmp.22_9 = PHI <_12(2), 1(3)>
1862 parm.6.dim[0].stride = iftmp.22_9;
1864 __x_MOD_foo (&parm.6, b_31(D));
1866 The aggregate function describing parm.6.dim[0].stride is encoded as a
1867 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1868 (the constant from the PHI node). */
1870 if (gimple_phi_num_args (phi
) != 2)
1872 tree arg0
= gimple_phi_arg_def (phi
, 0);
1873 tree arg1
= gimple_phi_arg_def (phi
, 1);
1876 if (is_gimple_ip_invariant (arg1
))
1881 else if (is_gimple_ip_invariant (arg0
))
1889 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1890 if (!is_gimple_assign (stmt
))
1894 agg_value
->pass_through
.operand
= operand
;
1896 else if (is_gimple_assign (stmt
))
1898 code
= gimple_assign_rhs_code (stmt
);
1899 switch (gimple_assign_rhs_class (stmt
))
1901 case GIMPLE_SINGLE_RHS
:
1902 if (is_gimple_ip_invariant (rhs1
))
1904 agg_value
->pass_through
.operand
= rhs1
;
1910 case GIMPLE_UNARY_RHS
:
1911 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1912 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1913 tcc_binary, this subtleness is somewhat misleading.
1915 Since tcc_unary is widely used in IPA-CP code to check an operation
1916 with one operand, here we only allow tc_unary operation to avoid
1917 possible problem. Then we can use (opclass == tc_unary) or not to
1918 distinguish unary and binary. */
1919 if (TREE_CODE_CLASS (code
) != tcc_unary
|| CONVERT_EXPR_CODE_P (code
))
1922 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1925 case GIMPLE_BINARY_RHS
:
1927 gimple
*rhs1_stmt
= stmt
;
1928 gimple
*rhs2_stmt
= stmt
;
1929 tree rhs2
= gimple_assign_rhs2 (stmt
);
1931 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &rhs1_stmt
);
1932 rhs2
= get_ssa_def_if_simple_copy (rhs2
, &rhs2_stmt
);
1934 if (is_gimple_ip_invariant (rhs2
))
1936 agg_value
->pass_through
.operand
= rhs2
;
1939 else if (is_gimple_ip_invariant (rhs1
))
1941 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1942 code
= swap_tree_comparison (code
);
1943 else if (!commutative_tree_code (code
))
1946 agg_value
->pass_through
.operand
= rhs1
;
1953 if (TREE_CODE_CLASS (code
) != tcc_comparison
1954 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1967 if (TREE_CODE (rhs1
) != SSA_NAME
)
1968 index
= load_from_unmodified_param_or_agg (fbi
, fbi
->info
, stmt
,
1970 &agg_value
->by_ref
);
1971 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1972 index
= ipa_get_param_decl_index (fbi
->info
, SSA_NAME_VAR (rhs1
));
1976 if (agg_value
->offset
>= 0)
1977 agg_value
->type
= TREE_TYPE (rhs1
);
1978 agg_value
->pass_through
.formal_id
= index
;
1979 agg_value
->pass_through
.operation
= code
;
1982 agg_value
->pass_through
.operand
= NULL_TREE
;
1985 /* If STMT is a memory store to the object whose address is BASE, extract
1986 information (offset, size, and value) into CONTENT, and return true,
1987 otherwise we conservatively assume the whole object is modified with
1988 unknown content, and return false. CHECK_REF means that access to object
1989 is expected to be in form of MEM_REF expression. */
1992 extract_mem_content (struct ipa_func_body_info
*fbi
,
1993 gimple
*stmt
, tree base
, bool check_ref
,
1994 struct ipa_known_agg_contents_list
*content
)
1996 HOST_WIDE_INT lhs_offset
, lhs_size
;
1999 if (!is_gimple_assign (stmt
))
2002 tree lhs
= gimple_assign_lhs (stmt
);
2003 tree lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
, &lhs_size
,
2010 if (TREE_CODE (lhs_base
) != MEM_REF
2011 || TREE_OPERAND (lhs_base
, 0) != base
2012 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
2015 else if (lhs_base
!= base
)
2018 content
->offset
= lhs_offset
;
2019 content
->size
= lhs_size
;
2020 content
->type
= TREE_TYPE (lhs
);
2021 content
->next
= NULL
;
2023 analyze_agg_content_value (fbi
, &content
->value
, stmt
);
2027 /* Traverse statements from CALL backwards, scanning whether an aggregate given
2028 in ARG is filled in constants or values that are derived from caller's
2029 formal parameter in the way described by some kinds of jump functions. FBI
2030 is the context of the caller function for interprocedural analysis. ARG can
2031 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2032 the type of the aggregate, JFUNC is the jump function for the aggregate. */
2035 determine_known_aggregate_parts (struct ipa_func_body_info
*fbi
,
2036 gcall
*call
, tree arg
,
2038 struct ipa_jump_func
*jfunc
)
2040 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
2041 bitmap visited
= NULL
;
2042 int item_count
= 0, value_count
= 0;
2043 HOST_WIDE_INT arg_offset
, arg_size
;
2045 bool check_ref
, by_ref
;
2047 int max_agg_items
= opt_for_fn (fbi
->node
->decl
, param_ipa_max_agg_items
);
2049 if (max_agg_items
== 0)
2052 /* The function operates in three stages. First, we prepare check_ref, r,
2053 arg_base and arg_offset based on what is actually passed as an actual
2056 if (POINTER_TYPE_P (arg_type
))
2059 if (TREE_CODE (arg
) == SSA_NAME
)
2062 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
2063 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
2068 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
2069 arg_size
= tree_to_uhwi (type_size
);
2070 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
2072 else if (TREE_CODE (arg
) == ADDR_EXPR
)
2076 arg
= TREE_OPERAND (arg
, 0);
2077 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2078 &arg_size
, &reverse
);
2081 if (DECL_P (arg_base
))
2084 ao_ref_init (&r
, arg_base
);
2096 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
2100 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2101 &arg_size
, &reverse
);
2105 ao_ref_init (&r
, arg
);
2108 /* Second stage traverses virtual SSA web backwards starting from the call
2109 statement, only looks at individual dominating virtual operand (its
2110 definition dominates the call), as long as it is confident that content
2111 of the aggregate is affected by definition of the virtual operand, it
2112 builds a sorted linked list of ipa_agg_jf_list describing that. */
2114 for (tree dom_vuse
= gimple_vuse (call
);
2115 dom_vuse
&& fbi
->aa_walk_budget
> 0;)
2117 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
2119 if (gimple_code (stmt
) == GIMPLE_PHI
)
2121 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
2122 fbi
->aa_walk_budget
,
2123 &visited
, false, NULL
, NULL
);
2127 fbi
->aa_walk_budget
--;
2128 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
2130 struct ipa_known_agg_contents_list
*content
2131 = XALLOCA (struct ipa_known_agg_contents_list
);
2133 if (!extract_mem_content (fbi
, stmt
, arg_base
, check_ref
, content
))
2136 /* Now we get a dominating virtual operand, and need to check
2137 whether its value is clobbered any other dominating one. */
2138 if ((content
->value
.pass_through
.formal_id
>= 0
2139 || content
->value
.pass_through
.operand
)
2140 && !clobber_by_agg_contents_list_p (all_list
, content
)
2141 /* Since IPA-CP stores results with unsigned int offsets, we can
2142 discard those which would not fit now before we stream them to
2144 && (content
->offset
+ content
->size
- arg_offset
2145 <= (HOST_WIDE_INT
) UINT_MAX
* BITS_PER_UNIT
))
2147 struct ipa_known_agg_contents_list
*copy
2148 = XALLOCA (struct ipa_known_agg_contents_list
);
2150 /* Add to the list consisting of only dominating virtual
2151 operands, whose definitions can finally reach the call. */
2152 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
2154 if (++value_count
== max_agg_items
)
2158 /* Add to the list consisting of all dominating virtual operands. */
2159 add_to_agg_contents_list (&all_list
, content
);
2161 if (++item_count
== 2 * max_agg_items
)
2164 dom_vuse
= gimple_vuse (stmt
);
2168 BITMAP_FREE (visited
);
2170 /* Third stage just goes over the list and creates an appropriate vector of
2171 ipa_agg_jf_item structures out of it, of course only if there are
2172 any meaningful items to begin with. */
2176 jfunc
->agg
.by_ref
= by_ref
;
2177 build_agg_jump_func_from_list (list
, value_count
, arg_offset
, jfunc
);
2182 /* Return the Ith param type of callee associated with call graph
2186 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
2189 tree type
= (e
->callee
2190 ? TREE_TYPE (e
->callee
->decl
)
2191 : gimple_call_fntype (e
->call_stmt
));
2192 tree t
= TYPE_ARG_TYPES (type
);
2194 for (n
= 0; n
< i
; n
++)
2200 if (t
&& t
!= void_list_node
)
2201 return TREE_VALUE (t
);
2204 t
= DECL_ARGUMENTS (e
->callee
->decl
);
2205 for (n
= 0; n
< i
; n
++)
2212 return TREE_TYPE (t
);
2216 /* Return a pointer to an ipa_vr just like TMP, but either find it in
2217 ipa_vr_hash_table or allocate it in GC memory. */
2220 ipa_get_value_range (const vrange
&tmp
)
2222 inchash::hash hstate
;
2223 inchash::add_vrange (tmp
, hstate
);
2224 hashval_t hash
= hstate
.end ();
2225 ipa_vr
**slot
= ipa_vr_hash_table
->find_slot_with_hash (&tmp
, hash
, INSERT
);
2229 ipa_vr
*vr
= new (ggc_alloc
<ipa_vr
> ()) ipa_vr (tmp
);
2234 /* Assign to JF a pointer to a range just like TMP but either fetch a
2235 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2238 ipa_set_jfunc_vr (ipa_jump_func
*jf
, const vrange
&tmp
)
2240 jf
->m_vr
= ipa_get_value_range (tmp
);
2244 ipa_set_jfunc_vr (ipa_jump_func
*jf
, const ipa_vr
&vr
)
2247 vr
.get_vrange (tmp
);
2248 ipa_set_jfunc_vr (jf
, tmp
);
2251 /* Compute jump function for all arguments of callsite CS and insert the
2252 information in the jump_functions array in the ipa_edge_args corresponding
2253 to this callsite. */
2256 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info
*fbi
,
2257 struct cgraph_edge
*cs
)
2259 ipa_node_params
*info
= ipa_node_params_sum
->get (cs
->caller
);
2260 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (cs
);
2261 gcall
*call
= cs
->call_stmt
;
2262 int n
, arg_num
= gimple_call_num_args (call
);
2263 bool useful_context
= false;
2265 if (arg_num
== 0 || args
->jump_functions
)
2267 vec_safe_grow_cleared (args
->jump_functions
, arg_num
, true);
2268 if (flag_devirtualize
)
2269 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
, true);
2271 if (gimple_call_internal_p (call
))
2273 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
2276 for (n
= 0; n
< arg_num
; n
++)
2278 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
2279 tree arg
= gimple_call_arg (call
, n
);
2280 tree param_type
= ipa_get_callee_param_type (cs
, n
);
2281 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
2284 class ipa_polymorphic_call_context
context (cs
->caller
->decl
,
2287 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
,
2288 &fbi
->aa_walk_budget
);
2289 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
2290 if (!context
.useless_p ())
2291 useful_context
= true;
2294 Value_Range
vr (TREE_TYPE (arg
));
2295 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2297 bool addr_nonzero
= false;
2298 bool strict_overflow
= false;
2300 if (TREE_CODE (arg
) == SSA_NAME
2302 && get_range_query (cfun
)->range_of_expr (vr
, arg
, cs
->call_stmt
)
2304 addr_nonzero
= true;
2305 else if (tree_single_nonzero_warnv_p (arg
, &strict_overflow
))
2306 addr_nonzero
= true;
2309 vr
.set_nonzero (TREE_TYPE (arg
));
2311 unsigned HOST_WIDE_INT bitpos
;
2312 unsigned align
, prec
= TYPE_PRECISION (TREE_TYPE (arg
));
2314 get_pointer_alignment_1 (arg
, &align
, &bitpos
);
2316 if (align
> BITS_PER_UNIT
2317 && opt_for_fn (cs
->caller
->decl
, flag_ipa_bit_cp
))
2320 = wi::bit_and_not (wi::mask (prec
, false, prec
),
2321 wide_int::from (align
/ BITS_PER_UNIT
- 1,
2323 wide_int value
= wide_int::from (bitpos
/ BITS_PER_UNIT
, prec
,
2325 irange_bitmask
bm (value
, mask
);
2327 vr
.set_varying (TREE_TYPE (arg
));
2328 irange
&r
= as_a
<irange
> (vr
);
2329 r
.update_bitmask (bm
);
2330 ipa_set_jfunc_vr (jfunc
, vr
);
2332 else if (addr_nonzero
)
2333 ipa_set_jfunc_vr (jfunc
, vr
);
2335 gcc_assert (!jfunc
->m_vr
);
2340 && Value_Range::supports_type_p (TREE_TYPE (arg
))
2341 && Value_Range::supports_type_p (param_type
)
2342 && irange::supports_p (TREE_TYPE (arg
))
2343 && irange::supports_p (param_type
)
2344 && get_range_query (cfun
)->range_of_expr (vr
, arg
, cs
->call_stmt
)
2345 && !vr
.undefined_p ())
2347 Value_Range
resvr (vr
);
2348 range_cast (resvr
, param_type
);
2349 if (!resvr
.undefined_p () && !resvr
.varying_p ())
2350 ipa_set_jfunc_vr (jfunc
, resvr
);
2352 gcc_assert (!jfunc
->m_vr
);
2355 gcc_assert (!jfunc
->m_vr
);
2358 if (is_gimple_ip_invariant (arg
)
2360 && is_global_var (arg
)
2361 && TREE_READONLY (arg
)))
2362 ipa_set_jf_constant (jfunc
, arg
, cs
);
2363 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
2364 && TREE_CODE (arg
) == PARM_DECL
)
2366 int index
= ipa_get_param_decl_index (info
, arg
);
2368 gcc_assert (index
>=0);
2369 /* Aggregate passed by value, check for pass-through, otherwise we
2370 will attempt to fill in aggregate contents later in this
2372 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
2374 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
2378 else if (TREE_CODE (arg
) == SSA_NAME
)
2380 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
2382 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
2386 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
2387 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
2392 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
2393 if (is_gimple_assign (stmt
))
2394 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
2395 call
, stmt
, arg
, param_type
);
2396 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2397 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
2399 as_a
<gphi
*> (stmt
));
2403 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2404 passed (because type conversions are ignored in gimple). Usually we can
2405 safely get type from function declaration, but in case of K&R prototypes or
2406 variadic functions we can try our luck with type of the pointer passed.
2407 TODO: Since we look for actual initialization of the memory object, we may better
2408 work out the type based on the memory stores we find. */
2410 param_type
= TREE_TYPE (arg
);
2412 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
2413 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2414 && (jfunc
->type
!= IPA_JF_ANCESTOR
2415 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2416 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
2417 || POINTER_TYPE_P (param_type
)))
2418 determine_known_aggregate_parts (fbi
, call
, arg
, param_type
, jfunc
);
2420 if (!useful_context
)
2421 vec_free (args
->polymorphic_call_contexts
);
2424 /* Compute jump functions for all edges - both direct and indirect - outgoing
2428 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2430 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
2432 struct cgraph_edge
*cs
;
2434 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
2436 struct cgraph_node
*callee
= cs
->callee
;
2440 callee
= callee
->ultimate_alias_target ();
2441 /* We do not need to bother analyzing calls to unknown functions
2442 unless they may become known during lto/whopr. */
2443 if (!callee
->definition
&& !flag_lto
2444 && !gimple_call_fnspec (cs
->call_stmt
).known_p ())
2447 ipa_compute_jump_functions_for_edge (fbi
, cs
);
2451 /* If STMT looks like a statement loading a value from a member pointer formal
2452 parameter, return that parameter and store the offset of the field to
2453 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2454 might be clobbered). If USE_DELTA, then we look for a use of the delta
2455 field rather than the pfn. */
2458 ipa_get_stmt_member_ptr_load_param (gimple
*stmt
, bool use_delta
,
2459 HOST_WIDE_INT
*offset_p
)
2461 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
2463 if (!gimple_assign_single_p (stmt
))
2466 rhs
= gimple_assign_rhs1 (stmt
);
2467 if (TREE_CODE (rhs
) == COMPONENT_REF
)
2469 ref_field
= TREE_OPERAND (rhs
, 1);
2470 rhs
= TREE_OPERAND (rhs
, 0);
2473 ref_field
= NULL_TREE
;
2474 if (TREE_CODE (rhs
) != MEM_REF
)
2476 rec
= TREE_OPERAND (rhs
, 0);
2477 if (TREE_CODE (rec
) != ADDR_EXPR
)
2479 rec
= TREE_OPERAND (rec
, 0);
2480 if (TREE_CODE (rec
) != PARM_DECL
2481 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
2483 ref_offset
= TREE_OPERAND (rhs
, 1);
2490 *offset_p
= int_bit_position (fld
);
2494 if (integer_nonzerop (ref_offset
))
2496 return ref_field
== fld
? rec
: NULL_TREE
;
2499 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
2503 /* Returns true iff T is an SSA_NAME defined by a statement. */
2506 ipa_is_ssa_with_stmt_def (tree t
)
2508 if (TREE_CODE (t
) == SSA_NAME
2509 && !SSA_NAME_IS_DEFAULT_DEF (t
))
2515 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2516 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2517 indirect call graph edge.
2518 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2520 static struct cgraph_edge
*
2521 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
2522 gcall
*stmt
, bool polymorphic
)
2524 struct cgraph_edge
*cs
;
2526 cs
= node
->get_edge (stmt
);
2527 cs
->indirect_info
->param_index
= param_index
;
2528 cs
->indirect_info
->agg_contents
= 0;
2529 cs
->indirect_info
->member_ptr
= 0;
2530 cs
->indirect_info
->guaranteed_unmodified
= 0;
2531 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
2532 ipa_set_param_used_by_indirect_call (info
, param_index
, true);
2533 if (cs
->indirect_info
->polymorphic
|| polymorphic
)
2534 ipa_set_param_used_by_polymorphic_call (info
, param_index
, true);
2538 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2539 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2540 intermediate information about each formal parameter. Currently it checks
2541 whether the call calls a pointer that is a formal parameter and if so, the
2542 parameter is marked with the called flag and an indirect call graph edge
2543 describing the call is created. This is very simple for ordinary pointers
2544 represented in SSA but not-so-nice when it comes to member pointers. The
2545 ugly part of this function does nothing more than trying to match the
2546 pattern of such a call. An example of such a pattern is the gimple dump
2547 below, the call is on the last line:
2550 f$__delta_5 = f.__delta;
2551 f$__pfn_24 = f.__pfn;
2555 f$__delta_5 = MEM[(struct *)&f];
2556 f$__pfn_24 = MEM[(struct *)&f + 4B];
2558 and a few lines below:
2561 D.2496_3 = (int) f$__pfn_24;
2562 D.2497_4 = D.2496_3 & 1;
2569 D.2500_7 = (unsigned int) f$__delta_5;
2570 D.2501_8 = &S + D.2500_7;
2571 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2572 D.2503_10 = *D.2502_9;
2573 D.2504_12 = f$__pfn_24 + -1;
2574 D.2505_13 = (unsigned int) D.2504_12;
2575 D.2506_14 = D.2503_10 + D.2505_13;
2576 D.2507_15 = *D.2506_14;
2577 iftmp.11_16 = (String:: *) D.2507_15;
2580 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2581 D.2500_19 = (unsigned int) f$__delta_5;
2582 D.2508_20 = &S + D.2500_19;
2583 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2585 Such patterns are results of simple calls to a member pointer:
2587 int doprinting (int (MyString::* f)(int) const)
2589 MyString S ("somestring");
2594 Moreover, the function also looks for called pointers loaded from aggregates
2595 passed by value or reference. */
2598 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2601 class ipa_node_params
*info
= fbi
->info
;
2602 HOST_WIDE_INT offset
;
2605 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2607 tree var
= SSA_NAME_VAR (target
);
2608 int index
= ipa_get_param_decl_index (info
, var
);
2610 ipa_note_param_call (fbi
->node
, index
, call
, false);
2615 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2616 bool guaranteed_unmodified
;
2617 if (gimple_assign_single_p (def
)
2618 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2619 gimple_assign_rhs1 (def
), &index
, &offset
,
2620 NULL
, &by_ref
, &guaranteed_unmodified
))
2622 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2624 cs
->indirect_info
->offset
= offset
;
2625 cs
->indirect_info
->agg_contents
= 1;
2626 cs
->indirect_info
->by_ref
= by_ref
;
2627 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2631 /* Now we need to try to match the complex pattern of calling a member
2633 if (gimple_code (def
) != GIMPLE_PHI
2634 || gimple_phi_num_args (def
) != 2
2635 || !POINTER_TYPE_P (TREE_TYPE (target
))
2636 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2639 /* First, we need to check whether one of these is a load from a member
2640 pointer that is a parameter to this function. */
2641 tree n1
= PHI_ARG_DEF (def
, 0);
2642 tree n2
= PHI_ARG_DEF (def
, 1);
2643 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2645 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2646 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2649 basic_block bb
, virt_bb
;
2650 basic_block join
= gimple_bb (def
);
2651 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2653 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2656 bb
= EDGE_PRED (join
, 0)->src
;
2657 virt_bb
= gimple_bb (d2
);
2659 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2661 bb
= EDGE_PRED (join
, 1)->src
;
2662 virt_bb
= gimple_bb (d1
);
2667 /* Second, we need to check that the basic blocks are laid out in the way
2668 corresponding to the pattern. */
2670 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2671 || single_pred (virt_bb
) != bb
2672 || single_succ (virt_bb
) != join
)
2675 /* Third, let's see that the branching is done depending on the least
2676 significant bit of the pfn. */
2678 gcond
*branch
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (bb
));
2682 if ((gimple_cond_code (branch
) != NE_EXPR
2683 && gimple_cond_code (branch
) != EQ_EXPR
)
2684 || !integer_zerop (gimple_cond_rhs (branch
)))
2687 tree cond
= gimple_cond_lhs (branch
);
2688 if (!ipa_is_ssa_with_stmt_def (cond
))
2691 def
= SSA_NAME_DEF_STMT (cond
);
2692 if (!is_gimple_assign (def
)
2693 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2694 || !integer_onep (gimple_assign_rhs2 (def
)))
2697 cond
= gimple_assign_rhs1 (def
);
2698 if (!ipa_is_ssa_with_stmt_def (cond
))
2701 def
= SSA_NAME_DEF_STMT (cond
);
2703 if (is_gimple_assign (def
)
2704 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2706 cond
= gimple_assign_rhs1 (def
);
2707 if (!ipa_is_ssa_with_stmt_def (cond
))
2709 def
= SSA_NAME_DEF_STMT (cond
);
2713 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2714 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2715 == ptrmemfunc_vbit_in_delta
),
2720 index
= ipa_get_param_decl_index (info
, rec
);
2722 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2724 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2726 cs
->indirect_info
->offset
= offset
;
2727 cs
->indirect_info
->agg_contents
= 1;
2728 cs
->indirect_info
->member_ptr
= 1;
2729 cs
->indirect_info
->guaranteed_unmodified
= 1;
2735 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2736 object referenced in the expression is a formal parameter of the caller
2737 FBI->node (described by FBI->info), create a call note for the
2741 ipa_analyze_virtual_call_uses (struct ipa_func_body_info
*fbi
,
2742 gcall
*call
, tree target
)
2744 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2746 HOST_WIDE_INT anc_offset
;
2748 if (!flag_devirtualize
)
2751 if (TREE_CODE (obj
) != SSA_NAME
)
2754 class ipa_node_params
*info
= fbi
->info
;
2755 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2757 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2761 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2762 gcc_assert (index
>= 0);
2763 if (detect_type_change_ssa (fbi
, obj
, obj_type_ref_class (target
),
2769 gimple
*stmt
= SSA_NAME_DEF_STMT (obj
);
2772 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2775 index
= ipa_get_param_decl_index (info
,
2776 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2777 gcc_assert (index
>= 0);
2778 if (detect_type_change (fbi
, obj
, expr
, obj_type_ref_class (target
),
2783 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2785 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2786 ii
->offset
= anc_offset
;
2787 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2788 ii
->otr_type
= obj_type_ref_class (target
);
2789 ii
->polymorphic
= 1;
2792 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2793 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2794 containing intermediate information about each formal parameter. */
2797 ipa_analyze_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
)
2799 tree target
= gimple_call_fn (call
);
2802 || (TREE_CODE (target
) != SSA_NAME
2803 && !virtual_method_call_p (target
)))
2806 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2807 /* If we previously turned the call into a direct call, there is
2808 no need to analyze. */
2809 if (cs
&& !cs
->indirect_unknown_callee
)
2812 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2815 tree target
= gimple_call_fn (call
);
2816 ipa_polymorphic_call_context
context (current_function_decl
,
2817 target
, call
, &instance
);
2819 gcc_checking_assert (cs
->indirect_info
->otr_type
2820 == obj_type_ref_class (target
));
2821 gcc_checking_assert (cs
->indirect_info
->otr_token
2822 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2824 cs
->indirect_info
->vptr_changed
2825 = !context
.get_dynamic_type (instance
,
2826 OBJ_TYPE_REF_OBJECT (target
),
2827 obj_type_ref_class (target
), call
,
2828 &fbi
->aa_walk_budget
);
2829 cs
->indirect_info
->context
= context
;
2832 if (TREE_CODE (target
) == SSA_NAME
)
2833 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2834 else if (virtual_method_call_p (target
))
2835 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2839 /* Analyze the call statement STMT with respect to formal parameters (described
2840 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2841 formal parameters are called. */
2844 ipa_analyze_stmt_uses (struct ipa_func_body_info
*fbi
, gimple
*stmt
)
2846 if (is_gimple_call (stmt
))
2847 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2850 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2851 If OP is a parameter declaration, mark it as used in the info structure
2855 visit_ref_for_mod_analysis (gimple
*, tree op
, tree
, void *data
)
2857 class ipa_node_params
*info
= (class ipa_node_params
*) data
;
2859 op
= get_base_address (op
);
2861 && TREE_CODE (op
) == PARM_DECL
)
2863 int index
= ipa_get_param_decl_index (info
, op
);
2864 gcc_assert (index
>= 0);
2865 ipa_set_param_used (info
, index
, true);
2871 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2872 the findings in various structures of the associated ipa_node_params
2873 structure, such as parameter flags, notes etc. FBI holds various data about
2874 the function being analyzed. */
2877 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2879 gimple_stmt_iterator gsi
;
2880 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2882 gimple
*stmt
= gsi_stmt (gsi
);
2884 if (is_gimple_debug (stmt
))
2887 ipa_analyze_stmt_uses (fbi
, stmt
);
2888 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2889 visit_ref_for_mod_analysis
,
2890 visit_ref_for_mod_analysis
,
2891 visit_ref_for_mod_analysis
);
2893 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2894 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2895 visit_ref_for_mod_analysis
,
2896 visit_ref_for_mod_analysis
,
2897 visit_ref_for_mod_analysis
);
2900 /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2903 load_from_dereferenced_name (tree expr
, tree name
)
2905 tree base
= get_base_address (expr
);
2906 return (TREE_CODE (base
) == MEM_REF
2907 && TREE_OPERAND (base
, 0) == name
);
2910 /* Calculate controlled uses of parameters of NODE. */
2913 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2915 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
2917 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2919 tree parm
= ipa_get_param (info
, i
);
2921 bool load_dereferenced
= false;
2923 /* For SSA regs see if parameter is used. For non-SSA we compute
2924 the flag during modification analysis. */
2925 if (is_gimple_reg (parm
))
2927 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2929 if (ddef
&& !has_zero_uses (ddef
))
2931 imm_use_iterator imm_iter
;
2934 ipa_set_param_used (info
, i
, true);
2935 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, ddef
)
2937 if (is_gimple_debug (stmt
))
2940 int all_stmt_uses
= 0;
2941 use_operand_p use_p
;
2942 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
2945 if (is_gimple_call (stmt
))
2947 if (gimple_call_internal_p (stmt
))
2949 call_uses
= IPA_UNDESCRIBED_USE
;
2952 int recognized_stmt_uses
;
2953 if (gimple_call_fn (stmt
) == ddef
)
2954 recognized_stmt_uses
= 1;
2956 recognized_stmt_uses
= 0;
2957 unsigned arg_count
= gimple_call_num_args (stmt
);
2958 for (unsigned i
= 0; i
< arg_count
; i
++)
2960 tree arg
= gimple_call_arg (stmt
, i
);
2962 recognized_stmt_uses
++;
2963 else if (load_from_dereferenced_name (arg
, ddef
))
2965 load_dereferenced
= true;
2966 recognized_stmt_uses
++;
2970 if (recognized_stmt_uses
!= all_stmt_uses
)
2972 call_uses
= IPA_UNDESCRIBED_USE
;
2976 call_uses
+= all_stmt_uses
;
2978 else if (gimple_assign_single_p (stmt
))
2980 tree rhs
= gimple_assign_rhs1 (stmt
);
2981 if (all_stmt_uses
!= 1
2982 || !load_from_dereferenced_name (rhs
, ddef
))
2984 call_uses
= IPA_UNDESCRIBED_USE
;
2987 load_dereferenced
= true;
2991 call_uses
= IPA_UNDESCRIBED_USE
;
3000 call_uses
= IPA_UNDESCRIBED_USE
;
3001 ipa_set_controlled_uses (info
, i
, call_uses
);
3002 ipa_set_param_load_dereferenced (info
, i
, load_dereferenced
);
3006 /* Free stuff in BI. */
3009 free_ipa_bb_info (struct ipa_bb_info
*bi
)
3011 bi
->cg_edges
.release ();
3012 bi
->param_aa_statuses
.release ();
3015 /* Dominator walker driving the analysis. */
3017 class analysis_dom_walker
: public dom_walker
3020 analysis_dom_walker (struct ipa_func_body_info
*fbi
)
3021 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
3023 edge
before_dom_children (basic_block
) final override
;
3026 struct ipa_func_body_info
*m_fbi
;
3030 analysis_dom_walker::before_dom_children (basic_block bb
)
3032 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
3033 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
3037 /* Release body info FBI. */
3040 ipa_release_body_info (struct ipa_func_body_info
*fbi
)
3043 struct ipa_bb_info
*bi
;
3045 FOR_EACH_VEC_ELT (fbi
->bb_infos
, i
, bi
)
3046 free_ipa_bb_info (bi
);
3047 fbi
->bb_infos
.release ();
3050 /* Initialize the array describing properties of formal parameters
3051 of NODE, analyze their uses and compute jump functions associated
3052 with actual arguments of calls from within NODE. */
3055 ipa_analyze_node (struct cgraph_node
*node
)
3057 struct ipa_func_body_info fbi
;
3058 class ipa_node_params
*info
;
3060 ipa_check_create_node_params ();
3061 ipa_check_create_edge_args ();
3062 info
= ipa_node_params_sum
->get_create (node
);
3064 if (info
->analysis_done
)
3066 info
->analysis_done
= 1;
3068 if (ipa_func_spec_opts_forbid_analysis_p (node
)
3069 || (count_formal_params (node
->decl
)
3070 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS
)))
3072 gcc_assert (!ipa_get_param_count (info
));
3076 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
3078 calculate_dominance_info (CDI_DOMINATORS
);
3079 ipa_initialize_node_params (node
);
3080 ipa_analyze_controlled_uses (node
);
3084 fbi
.bb_infos
= vNULL
;
3085 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
3086 fbi
.param_count
= ipa_get_param_count (info
);
3087 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
3089 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
3091 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3092 bi
->cg_edges
.safe_push (cs
);
3095 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
3097 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3098 bi
->cg_edges
.safe_push (cs
);
3101 enable_ranger (cfun
, false);
3102 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3103 disable_ranger (cfun
);
3105 ipa_release_body_info (&fbi
);
3106 free_dominance_info (CDI_DOMINATORS
);
3110 /* Update the jump functions associated with call graph edge E when the call
3111 graph edge CS is being inlined, assuming that E->caller is already (possibly
3112 indirectly) inlined into CS->callee and that E has not been inlined. */
3115 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
3116 struct cgraph_edge
*e
)
3118 ipa_edge_args
*top
= ipa_edge_args_sum
->get (cs
);
3119 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
3122 int count
= ipa_get_cs_argument_count (args
);
3125 for (i
= 0; i
< count
; i
++)
3127 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
3128 class ipa_polymorphic_call_context
*dst_ctx
3129 = ipa_get_ith_polymorhic_call_context (args
, i
);
3133 struct ipa_agg_jf_item
*item
;
3136 FOR_EACH_VEC_ELT (*dst
->agg
.items
, j
, item
)
3139 struct ipa_jump_func
*src
;
3141 if (item
->jftype
!= IPA_JF_PASS_THROUGH
3142 && item
->jftype
!= IPA_JF_LOAD_AGG
)
3145 dst_fid
= item
->value
.pass_through
.formal_id
;
3146 if (!top
|| dst_fid
>= ipa_get_cs_argument_count (top
))
3148 item
->jftype
= IPA_JF_UNKNOWN
;
3152 item
->value
.pass_through
.formal_id
= -1;
3153 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3154 if (src
->type
== IPA_JF_CONST
)
3156 if (item
->jftype
== IPA_JF_PASS_THROUGH
3157 && item
->value
.pass_through
.operation
== NOP_EXPR
)
3159 item
->jftype
= IPA_JF_CONST
;
3160 item
->value
.constant
= src
->value
.constant
.value
;
3164 else if (src
->type
== IPA_JF_PASS_THROUGH
3165 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3167 if (item
->jftype
== IPA_JF_PASS_THROUGH
3168 || !item
->value
.load_agg
.by_ref
3169 || src
->value
.pass_through
.agg_preserved
)
3170 item
->value
.pass_through
.formal_id
3171 = src
->value
.pass_through
.formal_id
;
3173 else if (src
->type
== IPA_JF_ANCESTOR
)
3175 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
3177 if (!src
->value
.ancestor
.offset
)
3178 item
->value
.pass_through
.formal_id
3179 = src
->value
.ancestor
.formal_id
;
3181 else if (src
->value
.ancestor
.agg_preserved
)
3183 gcc_checking_assert (item
->value
.load_agg
.by_ref
);
3185 item
->value
.pass_through
.formal_id
3186 = src
->value
.ancestor
.formal_id
;
3187 item
->value
.load_agg
.offset
3188 += src
->value
.ancestor
.offset
;
3192 if (item
->value
.pass_through
.formal_id
< 0)
3193 item
->jftype
= IPA_JF_UNKNOWN
;
3199 ipa_set_jf_unknown (dst
);
3203 if (dst
->type
== IPA_JF_ANCESTOR
)
3205 struct ipa_jump_func
*src
;
3206 int dst_fid
= dst
->value
.ancestor
.formal_id
;
3207 class ipa_polymorphic_call_context
*src_ctx
3208 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3210 /* Variable number of arguments can cause havoc if we try to access
3211 one that does not exist in the inlined edge. So make sure we
3213 if (dst_fid
>= ipa_get_cs_argument_count (top
))
3215 ipa_set_jf_unknown (dst
);
3219 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3221 if (src_ctx
&& !src_ctx
->useless_p ())
3223 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3225 /* TODO: Make type preserved safe WRT contexts. */
3226 if (!ipa_get_jf_ancestor_type_preserved (dst
))
3227 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3228 ctx
.offset_by (dst
->value
.ancestor
.offset
);
3229 if (!ctx
.useless_p ())
3233 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3235 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3238 dst_ctx
->combine_with (ctx
);
3242 /* Parameter and argument in ancestor jump function must be pointer
3243 type, which means access to aggregate must be by-reference. */
3244 gcc_assert (!src
->agg
.items
|| src
->agg
.by_ref
);
3246 if (src
->agg
.items
&& dst
->value
.ancestor
.agg_preserved
)
3248 struct ipa_agg_jf_item
*item
;
3251 /* Currently we do not produce clobber aggregate jump functions,
3252 replace with merging when we do. */
3253 gcc_assert (!dst
->agg
.items
);
3255 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3256 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3257 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
3258 item
->offset
-= dst
->value
.ancestor
.offset
;
3261 if (src
->type
== IPA_JF_PASS_THROUGH
3262 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3264 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
3265 dst
->value
.ancestor
.agg_preserved
&=
3266 src
->value
.pass_through
.agg_preserved
;
3268 else if (src
->type
== IPA_JF_ANCESTOR
)
3270 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
3271 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
3272 dst
->value
.ancestor
.agg_preserved
&=
3273 src
->value
.ancestor
.agg_preserved
;
3274 dst
->value
.ancestor
.keep_null
|= src
->value
.ancestor
.keep_null
;
3277 ipa_set_jf_unknown (dst
);
3279 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
3281 struct ipa_jump_func
*src
;
3282 /* We must check range due to calls with variable number of arguments
3283 and we cannot combine jump functions with operations. */
3284 if (dst
->value
.pass_through
.operation
== NOP_EXPR
3285 && (top
&& dst
->value
.pass_through
.formal_id
3286 < ipa_get_cs_argument_count (top
)))
3288 int dst_fid
= dst
->value
.pass_through
.formal_id
;
3289 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3290 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
3291 class ipa_polymorphic_call_context
*src_ctx
3292 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3294 if (src_ctx
&& !src_ctx
->useless_p ())
3296 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3298 /* TODO: Make type preserved safe WRT contexts. */
3299 if (!ipa_get_jf_pass_through_type_preserved (dst
))
3300 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3301 if (!ctx
.useless_p ())
3305 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3307 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3309 dst_ctx
->combine_with (ctx
);
3314 case IPA_JF_UNKNOWN
:
3315 ipa_set_jf_unknown (dst
);
3319 bool rd
= ipa_get_jf_pass_through_refdesc_decremented (dst
);
3320 ipa_set_jf_cst_copy (dst
, src
);
3322 ipa_zap_jf_refdesc (dst
);
3327 case IPA_JF_PASS_THROUGH
:
3329 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
3330 enum tree_code operation
;
3331 operation
= ipa_get_jf_pass_through_operation (src
);
3333 if (operation
== NOP_EXPR
)
3337 && ipa_get_jf_pass_through_agg_preserved (src
);
3338 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
3340 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
3341 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
3344 tree operand
= ipa_get_jf_pass_through_operand (src
);
3345 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
3350 case IPA_JF_ANCESTOR
:
3354 && ipa_get_jf_ancestor_agg_preserved (src
);
3355 ipa_set_ancestor_jf (dst
,
3356 ipa_get_jf_ancestor_offset (src
),
3357 ipa_get_jf_ancestor_formal_id (src
),
3359 ipa_get_jf_ancestor_keep_null (src
));
3367 && (dst_agg_p
|| !src
->agg
.by_ref
))
3369 /* Currently we do not produce clobber aggregate jump
3370 functions, replace with merging when we do. */
3371 gcc_assert (!dst
->agg
.items
);
3373 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3374 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3378 ipa_set_jf_unknown (dst
);
3383 /* If TARGET is an addr_expr of a function declaration, make it the
3384 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3385 Otherwise, return NULL. */
3387 struct cgraph_edge
*
3388 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
3391 struct cgraph_node
*callee
;
3392 bool unreachable
= false;
3394 if (TREE_CODE (target
) == ADDR_EXPR
)
3395 target
= TREE_OPERAND (target
, 0);
3396 if (TREE_CODE (target
) != FUNCTION_DECL
)
3398 target
= canonicalize_constructor_val (target
, NULL
);
3399 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
3401 /* Member pointer call that goes through a VMT lookup. */
3402 if (ie
->indirect_info
->member_ptr
3403 /* Or if target is not an invariant expression and we do not
3404 know if it will evaulate to function at runtime.
3405 This can happen when folding through &VAR, where &VAR
3406 is IP invariant, but VAR itself is not.
3408 TODO: Revisit this when GCC 5 is branched. It seems that
3409 member_ptr check is not needed and that we may try to fold
3410 the expression and see if VAR is readonly. */
3411 || !is_gimple_ip_invariant (target
))
3413 if (dump_enabled_p ())
3415 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3416 "discovered direct call non-invariant %s\n",
3417 ie
->caller
->dump_name ());
3423 if (dump_enabled_p ())
3425 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3426 "discovered direct call to non-function in %s, "
3427 "making it __builtin_unreachable\n",
3428 ie
->caller
->dump_name ());
3431 target
= builtin_decl_unreachable ();
3432 callee
= cgraph_node::get_create (target
);
3436 callee
= cgraph_node::get (target
);
3439 callee
= cgraph_node::get (target
);
3441 /* Because may-edges are not explicitely represented and vtable may be external,
3442 we may create the first reference to the object in the unit. */
3443 if (!callee
|| callee
->inlined_to
)
3446 /* We are better to ensure we can refer to it.
3447 In the case of static functions we are out of luck, since we already
3448 removed its body. In the case of public functions we may or may
3449 not introduce the reference. */
3450 if (!canonicalize_constructor_val (target
, NULL
)
3451 || !TREE_PUBLIC (target
))
3454 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
3455 "(%s -> %s) but cannot refer to it. Giving up.\n",
3456 ie
->caller
->dump_name (),
3457 ie
->callee
->dump_name ());
3460 callee
= cgraph_node::get_create (target
);
3463 /* If the edge is already speculated. */
3464 if (speculative
&& ie
->speculative
)
3468 cgraph_edge
*e2
= ie
->speculative_call_for_target (callee
);
3472 fprintf (dump_file
, "ipa-prop: Discovered call to a "
3473 "speculative target (%s -> %s) but the call is "
3474 "already speculated to different target. "
3476 ie
->caller
->dump_name (), callee
->dump_name ());
3482 "ipa-prop: Discovered call to a speculative target "
3483 "(%s -> %s) this agree with previous speculation.\n",
3484 ie
->caller
->dump_name (), callee
->dump_name ());
3490 if (!dbg_cnt (devirt
))
3493 ipa_check_create_node_params ();
3495 /* We cannot make edges to inline clones. It is bug that someone removed
3496 the cgraph node too early. */
3497 gcc_assert (!callee
->inlined_to
);
3499 if (dump_file
&& !unreachable
)
3501 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
3502 "(%s -> %s), for stmt ",
3503 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
3504 speculative
? "speculative" : "known",
3505 ie
->caller
->dump_name (),
3506 callee
->dump_name ());
3508 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
3510 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
3512 if (dump_enabled_p ())
3514 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3515 "converting indirect call in %s to direct call to %s\n",
3516 ie
->caller
->dump_name (), callee
->dump_name ());
3520 struct cgraph_edge
*orig
= ie
;
3521 ie
= cgraph_edge::make_direct (ie
, callee
);
3522 /* If we resolved speculative edge the cost is already up to date
3523 for direct call (adjusted by inline_edge_duplication_hook). */
3526 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
3527 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
3528 - eni_size_weights
.call_cost
);
3529 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
3530 - eni_time_weights
.call_cost
);
3535 if (!callee
->can_be_discarded_p ())
3538 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
3542 /* make_speculative will update ie's cost to direct call cost. */
3543 ie
= ie
->make_speculative
3544 (callee
, ie
->count
.apply_scale (8, 10));
3550 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3551 CONSTRUCTOR and return it. Return NULL if the search fails for some
3555 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3557 tree type
= TREE_TYPE (constructor
);
3558 if (TREE_CODE (type
) != ARRAY_TYPE
3559 && TREE_CODE (type
) != RECORD_TYPE
)
3564 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3566 HOST_WIDE_INT elt_offset
;
3567 if (TREE_CODE (type
) == ARRAY_TYPE
)
3570 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3571 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3575 if (TREE_CODE (index
) == RANGE_EXPR
)
3576 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3578 off
= wi::to_offset (index
);
3579 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3581 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3582 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3583 off
= wi::sext (off
- wi::to_offset (low_bound
),
3584 TYPE_PRECISION (TREE_TYPE (index
)));
3586 off
*= wi::to_offset (unit_size
);
3587 /* ??? Handle more than just the first index of a
3591 off
= wi::to_offset (unit_size
) * ix
;
3593 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3594 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3596 elt_offset
= off
.to_shwi ();
3598 else if (TREE_CODE (type
) == RECORD_TYPE
)
3600 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3601 if (DECL_BIT_FIELD (index
))
3603 elt_offset
= int_bit_position (index
);
3608 if (elt_offset
> req_offset
)
3611 if (TREE_CODE (val
) == CONSTRUCTOR
)
3612 return find_constructor_constant_at_offset (val
,
3613 req_offset
- elt_offset
);
3615 if (elt_offset
== req_offset
3616 && is_gimple_reg_type (TREE_TYPE (val
))
3617 && is_gimple_ip_invariant (val
))
3623 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3624 invariant from a static constructor and if so, return it. Otherwise return
3628 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3632 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3634 scalar
= TREE_OPERAND (scalar
, 0);
3638 || !is_global_var (scalar
)
3639 || !TREE_READONLY (scalar
)
3640 || !DECL_INITIAL (scalar
)
3641 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3644 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3647 /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3648 is none. BY_REF specifies whether the value has to be passed by reference
3652 ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function
*agg_jfunc
,
3653 ipa_node_params
*src_info
,
3654 cgraph_node
*src_node
,
3655 HOST_WIDE_INT offset
, bool by_ref
)
3657 if (by_ref
!= agg_jfunc
->by_ref
)
3660 for (const ipa_agg_jf_item
&item
: agg_jfunc
->items
)
3661 if (item
.offset
== offset
)
3662 return ipa_agg_value_from_jfunc (src_info
, src_node
, &item
);
3667 /* Remove a reference to SYMBOL from the list of references of a node given by
3668 reference description RDESC. Return true if the reference has been
3669 successfully found and removed. */
3672 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
3674 struct ipa_ref
*to_del
;
3675 struct cgraph_edge
*origin
;
3680 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
3681 origin
->lto_stmt_uid
, IPA_REF_ADDR
);
3685 to_del
->remove_reference ();
3687 fprintf (dump_file
, "ipa-prop: Removed a reference from %s to %s.\n",
3688 origin
->caller
->dump_name (), symbol
->dump_name ());
3692 /* If JFUNC has a reference description with refcount different from
3693 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3694 NULL. JFUNC must be a constant jump function. */
3696 static struct ipa_cst_ref_desc
*
3697 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
3699 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
3700 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
3706 /* If the value of constant jump function JFUNC is an address of a function
3707 declaration, return the associated call graph node. Otherwise return
3710 static symtab_node
*
3711 symtab_node_for_jfunc (struct ipa_jump_func
*jfunc
)
3713 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
3714 tree cst
= ipa_get_jf_constant (jfunc
);
3715 if (TREE_CODE (cst
) != ADDR_EXPR
3716 || (TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
3717 && TREE_CODE (TREE_OPERAND (cst
, 0)) != VAR_DECL
))
3720 return symtab_node::get (TREE_OPERAND (cst
, 0));
3724 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3725 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3726 the edge specified in the rdesc. Return false if either the symbol or the
3727 reference could not be found, otherwise return true. */
3730 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
3732 struct ipa_cst_ref_desc
*rdesc
;
3733 if (jfunc
->type
== IPA_JF_CONST
3734 && (rdesc
= jfunc_rdesc_usable (jfunc
))
3735 && --rdesc
->refcount
== 0)
3737 symtab_node
*symbol
= symtab_node_for_jfunc (jfunc
);
3741 return remove_described_reference (symbol
, rdesc
);
3746 /* Try to find a destination for indirect edge IE that corresponds to a simple
3747 call or a call of a member function pointer and where the destination is a
3748 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3749 the type of the parameter to which the result of JFUNC is passed. If it can
3750 be determined, return the newly direct edge, otherwise return NULL.
3751 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3754 static struct cgraph_edge
*
3755 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3756 struct ipa_jump_func
*jfunc
, tree target_type
,
3757 struct cgraph_node
*new_root
,
3758 class ipa_node_params
*new_root_info
)
3760 struct cgraph_edge
*cs
;
3761 tree target
= NULL_TREE
;
3762 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3763 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3767 target
= ipa_find_agg_cst_from_init (scalar
, ie
->indirect_info
->offset
,
3768 ie
->indirect_info
->by_ref
);
3769 if (!target
&& ie
->indirect_info
->guaranteed_unmodified
)
3770 target
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3772 ie
->indirect_info
->offset
,
3773 ie
->indirect_info
->by_ref
);
3779 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3781 if (cs
&& !agg_contents
)
3784 gcc_checking_assert (cs
->callee
3786 || jfunc
->type
!= IPA_JF_CONST
3787 || !symtab_node_for_jfunc (jfunc
)
3788 || cs
->callee
== symtab_node_for_jfunc (jfunc
)));
3789 ok
= try_decrement_rdesc_refcount (jfunc
);
3790 gcc_checking_assert (ok
);
3796 /* Return the target to be used in cases of impossible devirtualization. IE
3797 and target (the latter can be NULL) are dumped when dumping is enabled. */
3800 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3806 "Type inconsistent devirtualization: %s->%s\n",
3807 ie
->caller
->dump_name (),
3808 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3811 "No devirtualization target in %s\n",
3812 ie
->caller
->dump_name ());
3814 tree new_target
= builtin_decl_unreachable ();
3815 cgraph_node::get_create (new_target
);
3819 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3820 call based on a formal parameter which is described by jump function JFUNC
3821 and if it can be determined, make it direct and return the direct edge.
3822 Otherwise, return NULL. CTX describes the polymorphic context that the
3823 parameter the call is based on brings along with it. NEW_ROOT and
3824 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3827 static struct cgraph_edge
*
3828 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3829 struct ipa_jump_func
*jfunc
,
3830 class ipa_polymorphic_call_context ctx
,
3831 struct cgraph_node
*new_root
,
3832 class ipa_node_params
*new_root_info
)
3835 bool speculative
= false;
3837 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3840 gcc_assert (!ie
->indirect_info
->by_ref
);
3842 /* Try to do lookup via known virtual table pointer value. */
3843 if (!ie
->indirect_info
->vptr_changed
3844 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3847 unsigned HOST_WIDE_INT offset
;
3849 if (jfunc
->type
== IPA_JF_CONST
)
3850 t
= ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc
),
3851 ie
->indirect_info
->offset
, true);
3853 t
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3855 ie
->indirect_info
->offset
, true);
3856 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3859 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3860 vtable
, offset
, &can_refer
);
3864 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
,
3865 BUILT_IN_UNREACHABLE_TRAP
)
3866 || !possible_polymorphic_call_target_p
3867 (ie
, cgraph_node::get (t
)))
3869 /* Do not speculate builtin_unreachable, it is stupid! */
3870 if (!ie
->indirect_info
->vptr_changed
)
3871 target
= ipa_impossible_devirt_target (ie
, target
);
3878 speculative
= ie
->indirect_info
->vptr_changed
;
3884 ipa_polymorphic_call_context
ie_context (ie
);
3885 vec
<cgraph_node
*>targets
;
3888 ctx
.offset_by (ie
->indirect_info
->offset
);
3889 if (ie
->indirect_info
->vptr_changed
)
3890 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3891 ie
->indirect_info
->otr_type
);
3892 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3893 targets
= possible_polymorphic_call_targets
3894 (ie
->indirect_info
->otr_type
,
3895 ie
->indirect_info
->otr_token
,
3897 if (final
&& targets
.length () <= 1)
3899 speculative
= false;
3900 if (targets
.length () == 1)
3901 target
= targets
[0]->decl
;
3903 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3905 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3906 && !ie
->speculative
&& ie
->maybe_hot_p ())
3909 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3910 ie
->indirect_info
->otr_token
,
3911 ie
->indirect_info
->context
);
3921 if (!possible_polymorphic_call_target_p
3922 (ie
, cgraph_node::get_create (target
)))
3926 target
= ipa_impossible_devirt_target (ie
, target
);
3928 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3934 /* Update the param called notes associated with NODE when CS is being inlined,
3935 assuming NODE is (potentially indirectly) inlined into CS->callee.
3936 Moreover, if the callee is discovered to be constant, create a new cgraph
3937 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3938 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3941 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3942 struct cgraph_node
*node
,
3943 vec
<cgraph_edge
*> *new_edges
)
3945 class ipa_edge_args
*top
;
3946 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3947 struct cgraph_node
*new_root
;
3948 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3951 ipa_check_create_edge_args ();
3952 top
= ipa_edge_args_sum
->get (cs
);
3953 new_root
= cs
->caller
->inlined_to
3954 ? cs
->caller
->inlined_to
: cs
->caller
;
3955 new_root_info
= ipa_node_params_sum
->get (new_root
);
3956 inlined_node_info
= ipa_node_params_sum
->get (cs
->callee
->function_symbol ());
3958 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3960 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3961 struct ipa_jump_func
*jfunc
;
3964 next_ie
= ie
->next_callee
;
3966 if (ici
->param_index
== -1)
3969 /* We must check range due to calls with variable number of arguments: */
3970 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
3972 ici
->param_index
= -1;
3976 param_index
= ici
->param_index
;
3977 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3979 auto_vec
<cgraph_node
*, 4> spec_targets
;
3980 if (ie
->speculative
)
3981 for (cgraph_edge
*direct
= ie
->first_speculative_call_target ();
3983 direct
= direct
->next_speculative_call_target ())
3984 spec_targets
.safe_push (direct
->callee
);
3986 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3987 new_direct_edge
= NULL
;
3988 else if (ici
->polymorphic
)
3990 ipa_polymorphic_call_context ctx
;
3991 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3992 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
,
3998 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3999 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
4005 /* If speculation was removed, then we need to do nothing. */
4006 if (new_direct_edge
&& new_direct_edge
!= ie
4007 && spec_targets
.contains (new_direct_edge
->callee
))
4009 new_direct_edge
->indirect_inlining_edge
= 1;
4011 if (!new_direct_edge
->speculative
)
4014 else if (new_direct_edge
)
4016 new_direct_edge
->indirect_inlining_edge
= 1;
4019 new_edges
->safe_push (new_direct_edge
);
4022 /* If speculative edge was introduced we still need to update
4023 call info of the indirect edge. */
4024 if (!new_direct_edge
->speculative
)
4027 if (jfunc
->type
== IPA_JF_PASS_THROUGH
4028 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
4030 if (ici
->agg_contents
4031 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
4032 && !ici
->polymorphic
)
4033 ici
->param_index
= -1;
4036 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
4037 if (ici
->polymorphic
4038 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
4039 ici
->vptr_changed
= true;
4040 ipa_set_param_used_by_indirect_call (new_root_info
,
4041 ici
->param_index
, true);
4042 if (ici
->polymorphic
)
4043 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4044 ici
->param_index
, true);
4047 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
4049 if (ici
->agg_contents
4050 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
4051 && !ici
->polymorphic
)
4052 ici
->param_index
= -1;
4055 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
4056 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
4057 if (ici
->polymorphic
4058 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
4059 ici
->vptr_changed
= true;
4060 ipa_set_param_used_by_indirect_call (new_root_info
,
4061 ici
->param_index
, true);
4062 if (ici
->polymorphic
)
4063 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4064 ici
->param_index
, true);
4068 /* Either we can find a destination for this edge now or never. */
4069 ici
->param_index
= -1;
4075 /* Recursively traverse subtree of NODE (including node) made of inlined
4076 cgraph_edges when CS has been inlined and invoke
4077 update_indirect_edges_after_inlining on all nodes and
4078 update_jump_functions_after_inlining on all non-inlined edges that lead out
4079 of this subtree. Newly discovered indirect edges will be added to
4080 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4084 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
4085 struct cgraph_node
*node
,
4086 vec
<cgraph_edge
*> *new_edges
)
4088 struct cgraph_edge
*e
;
4091 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
4093 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4094 if (!e
->inline_failed
)
4095 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
4097 update_jump_functions_after_inlining (cs
, e
);
4098 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4099 update_jump_functions_after_inlining (cs
, e
);
4104 /* Combine two controlled uses counts as done during inlining. */
4107 combine_controlled_uses_counters (int c
, int d
)
4109 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
4110 return IPA_UNDESCRIBED_USE
;
4115 /* Propagate number of controlled users from CS->caleee to the new root of the
4116 tree of inlined nodes. */
4119 propagate_controlled_uses (struct cgraph_edge
*cs
)
4121 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4124 struct cgraph_node
*new_root
= cs
->caller
->inlined_to
4125 ? cs
->caller
->inlined_to
: cs
->caller
;
4126 ipa_node_params
*new_root_info
= ipa_node_params_sum
->get (new_root
);
4127 ipa_node_params
*old_root_info
= ipa_node_params_sum
->get (cs
->callee
);
4133 count
= MIN (ipa_get_cs_argument_count (args
),
4134 ipa_get_param_count (old_root_info
));
4135 for (i
= 0; i
< count
; i
++)
4137 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4138 struct ipa_cst_ref_desc
*rdesc
;
4140 if (jf
->type
== IPA_JF_PASS_THROUGH
4141 && !ipa_get_jf_pass_through_refdesc_decremented (jf
))
4144 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
4145 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
4146 d
= ipa_get_controlled_uses (old_root_info
, i
);
4148 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
4149 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
4150 c
= combine_controlled_uses_counters (c
, d
);
4151 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
4153 if (c
!= IPA_UNDESCRIBED_USE
)
4155 lderef
= (ipa_get_param_load_dereferenced (new_root_info
, src_idx
)
4156 || ipa_get_param_load_dereferenced (old_root_info
, i
));
4157 ipa_set_param_load_dereferenced (new_root_info
, src_idx
, lderef
);
4160 if (c
== 0 && !lderef
&& new_root_info
->ipcp_orig_node
)
4162 struct cgraph_node
*n
;
4163 struct ipa_ref
*ref
;
4164 tree t
= new_root_info
->known_csts
[src_idx
];
4166 if (t
&& TREE_CODE (t
) == ADDR_EXPR
4167 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
4168 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
4169 && (ref
= new_root
->find_reference (n
, NULL
, 0,
4173 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
4174 "reference from %s to %s.\n",
4175 new_root
->dump_name (),
4177 ref
->remove_reference ();
4181 else if (jf
->type
== IPA_JF_CONST
4182 && (rdesc
= jfunc_rdesc_usable (jf
)))
4184 int d
= ipa_get_controlled_uses (old_root_info
, i
);
4185 int c
= rdesc
->refcount
;
4186 tree cst
= ipa_get_jf_constant (jf
);
4187 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
4188 if (rdesc
->refcount
!= IPA_UNDESCRIBED_USE
4189 && ipa_get_param_load_dereferenced (old_root_info
, i
)
4190 && TREE_CODE (cst
) == ADDR_EXPR
4191 && VAR_P (TREE_OPERAND (cst
, 0)))
4193 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4194 new_root
->create_reference (n
, IPA_REF_LOAD
, NULL
);
4196 fprintf (dump_file
, "ipa-prop: Address IPA constant will reach "
4197 "a load so adding LOAD reference from %s to %s.\n",
4198 new_root
->dump_name (), n
->dump_name ());
4200 if (rdesc
->refcount
== 0)
4202 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
4203 && ((TREE_CODE (TREE_OPERAND (cst
, 0))
4205 || VAR_P (TREE_OPERAND (cst
, 0))));
4207 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4210 remove_described_reference (n
, rdesc
);
4211 cgraph_node
*clone
= cs
->caller
;
4212 while (clone
->inlined_to
4213 && clone
->ipcp_clone
4214 && clone
!= rdesc
->cs
->caller
)
4216 struct ipa_ref
*ref
;
4217 ref
= clone
->find_reference (n
, NULL
, 0, IPA_REF_ADDR
);
4221 fprintf (dump_file
, "ipa-prop: Removing "
4222 "cloning-created reference "
4224 clone
->dump_name (),
4226 ref
->remove_reference ();
4228 clone
= clone
->callers
->caller
;
4235 for (i
= ipa_get_param_count (old_root_info
);
4236 i
< ipa_get_cs_argument_count (args
);
4239 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4241 if (jf
->type
== IPA_JF_CONST
)
4243 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
4245 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
4247 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
4248 ipa_set_controlled_uses (new_root_info
,
4249 jf
->value
.pass_through
.formal_id
,
4250 IPA_UNDESCRIBED_USE
);
4254 /* Update jump functions and call note functions on inlining the call site CS.
4255 CS is expected to lead to a node already cloned by
4256 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4257 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
4261 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
4262 vec
<cgraph_edge
*> *new_edges
)
4265 /* Do nothing if the preparation phase has not been carried out yet
4266 (i.e. during early inlining). */
4267 if (!ipa_node_params_sum
)
4269 gcc_assert (ipa_edge_args_sum
);
4271 propagate_controlled_uses (cs
);
4272 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
4273 ipa_node_params_sum
->remove (cs
->callee
);
4275 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4279 if (args
->jump_functions
)
4281 struct ipa_jump_func
*jf
;
4283 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4284 if (jf
->type
== IPA_JF_CONST
4285 && ipa_get_jf_constant_rdesc (jf
))
4292 ipa_edge_args_sum
->remove (cs
);
4294 if (ipcp_transformation_sum
)
4295 ipcp_transformation_sum
->remove (cs
->callee
);
4300 /* Ensure that array of edge arguments infos is big enough to accommodate a
4301 structure for all edges and reallocates it if not. Also, allocate
4302 associated hash tables is they do not already exist. */
4305 ipa_check_create_edge_args (void)
4307 if (!ipa_edge_args_sum
)
4309 = (new (ggc_alloc_no_dtor
<ipa_edge_args_sum_t
> ())
4310 ipa_edge_args_sum_t (symtab
, true));
4311 if (!ipa_vr_hash_table
)
4312 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4315 /* Free all ipa_edge structures. */
4318 ipa_free_all_edge_args (void)
4320 if (!ipa_edge_args_sum
)
4323 ggc_delete (ipa_edge_args_sum
);
4324 ipa_edge_args_sum
= NULL
;
4327 /* Free all ipa_node_params structures. */
4330 ipa_free_all_node_params (void)
4332 if (ipa_node_params_sum
)
4333 ggc_delete (ipa_node_params_sum
);
4334 ipa_node_params_sum
= NULL
;
4337 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4338 tables if they do not already exist. */
4341 ipcp_transformation_initialize (void)
4343 if (!ipa_vr_hash_table
)
4344 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4345 if (ipcp_transformation_sum
== NULL
)
4347 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
4348 ipcp_transformation_sum
->disable_insertion_hook ();
4352 /* Release the IPA CP transformation summary. */
4355 ipcp_free_transformation_sum (void)
4357 if (!ipcp_transformation_sum
)
4360 ipcp_transformation_sum
->~function_summary
<ipcp_transformation
*> ();
4361 ggc_free (ipcp_transformation_sum
);
4362 ipcp_transformation_sum
= NULL
;
4365 /* Set the aggregate replacements of NODE to be AGGVALS. */
4368 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
4369 vec
<ipa_argagg_value
, va_gc
> *aggs
)
4371 ipcp_transformation_initialize ();
4372 ipcp_transformation
*s
= ipcp_transformation_sum
->get_create (node
);
4373 s
->m_agg_values
= aggs
;
4376 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4377 count data structures accordingly. */
4380 ipa_edge_args_sum_t::remove (cgraph_edge
*cs
, ipa_edge_args
*args
)
4382 if (args
->jump_functions
)
4384 struct ipa_jump_func
*jf
;
4386 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4388 struct ipa_cst_ref_desc
*rdesc
;
4389 try_decrement_rdesc_refcount (jf
);
4390 if (jf
->type
== IPA_JF_CONST
4391 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
4398 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4399 reference count data strucutres accordingly. */
4402 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
4403 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
4407 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
4408 if (old_args
->polymorphic_call_contexts
)
4409 new_args
->polymorphic_call_contexts
4410 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
4412 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
4414 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
4415 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
4417 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
4419 if (src_jf
->type
== IPA_JF_CONST
)
4421 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
4424 dst_jf
->value
.constant
.rdesc
= NULL
;
4425 else if (src
->caller
== dst
->caller
)
4427 /* Creation of a speculative edge. If the source edge is the one
4428 grabbing a reference, we must create a new (duplicate)
4429 reference description. Otherwise they refer to the same
4430 description corresponding to a reference taken in a function
4431 src->caller is inlined to. In that case we just must
4432 increment the refcount. */
4433 if (src_rdesc
->cs
== src
)
4435 symtab_node
*n
= symtab_node_for_jfunc (src_jf
);
4436 gcc_checking_assert (n
);
4438 = src
->caller
->find_reference (n
, src
->call_stmt
,
4441 gcc_checking_assert (ref
);
4442 dst
->caller
->clone_reference (ref
, ref
->stmt
);
4444 ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4445 dst_rdesc
->cs
= dst
;
4446 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4447 dst_rdesc
->next_duplicate
= NULL
;
4448 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4452 src_rdesc
->refcount
++;
4453 dst_jf
->value
.constant
.rdesc
= src_rdesc
;
4456 else if (src_rdesc
->cs
== src
)
4458 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4459 dst_rdesc
->cs
= dst
;
4460 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4461 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
4462 src_rdesc
->next_duplicate
= dst_rdesc
;
4463 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4467 struct ipa_cst_ref_desc
*dst_rdesc
;
4468 /* This can happen during inlining, when a JFUNC can refer to a
4469 reference taken in a function up in the tree of inline clones.
4470 We need to find the duplicate that refers to our tree of
4473 gcc_assert (dst
->caller
->inlined_to
);
4474 for (dst_rdesc
= src_rdesc
->next_duplicate
;
4476 dst_rdesc
= dst_rdesc
->next_duplicate
)
4478 struct cgraph_node
*top
;
4479 top
= dst_rdesc
->cs
->caller
->inlined_to
4480 ? dst_rdesc
->cs
->caller
->inlined_to
4481 : dst_rdesc
->cs
->caller
;
4482 if (dst
->caller
->inlined_to
== top
)
4485 gcc_assert (dst_rdesc
);
4486 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4489 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
4490 && src
->caller
== dst
->caller
)
4492 struct cgraph_node
*inline_root
= dst
->caller
->inlined_to
4493 ? dst
->caller
->inlined_to
: dst
->caller
;
4494 ipa_node_params
*root_info
= ipa_node_params_sum
->get (inline_root
);
4495 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
4497 int c
= ipa_get_controlled_uses (root_info
, idx
);
4498 if (c
!= IPA_UNDESCRIBED_USE
)
4501 ipa_set_controlled_uses (root_info
, idx
, c
);
4507 /* Analyze newly added function into callgraph. */
4510 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
4512 if (node
->has_gimple_body_p ())
4513 ipa_analyze_node (node
);
4516 /* Hook that is called by summary when a node is duplicated. */
4519 ipa_node_params_t::duplicate(cgraph_node
*, cgraph_node
*,
4520 ipa_node_params
*old_info
,
4521 ipa_node_params
*new_info
)
4523 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
4524 new_info
->lattices
= NULL
;
4525 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
4526 new_info
->known_csts
= old_info
->known_csts
.copy ();
4527 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
4529 new_info
->analysis_done
= old_info
->analysis_done
;
4530 new_info
->node_enqueued
= old_info
->node_enqueued
;
4531 new_info
->versionable
= old_info
->versionable
;
4534 /* Duplication of ipcp transformation summaries. */
4537 ipcp_transformation_t::duplicate(cgraph_node
*, cgraph_node
*dst
,
4538 ipcp_transformation
*src_trans
,
4539 ipcp_transformation
*dst_trans
)
4541 /* Avoid redundant work of duplicating vectors we will never use. */
4542 if (dst
->inlined_to
)
4544 dst_trans
->m_agg_values
= vec_safe_copy (src_trans
->m_agg_values
);
4545 dst_trans
->m_vr
= vec_safe_copy (src_trans
->m_vr
);
4548 /* Register our cgraph hooks if they are not already there. */
4551 ipa_register_cgraph_hooks (void)
4553 ipa_check_create_node_params ();
4554 ipa_check_create_edge_args ();
4556 function_insertion_hook_holder
=
4557 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
4560 /* Unregister our cgraph hooks if they are not already there. */
4563 ipa_unregister_cgraph_hooks (void)
4565 if (function_insertion_hook_holder
)
4566 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
4567 function_insertion_hook_holder
= NULL
;
4570 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4571 longer needed after ipa-cp. */
4574 ipa_free_all_structures_after_ipa_cp (void)
4576 if (!optimize
&& !in_lto_p
)
4578 ipa_free_all_edge_args ();
4579 ipa_free_all_node_params ();
4580 ipcp_sources_pool
.release ();
4581 ipcp_cst_values_pool
.release ();
4582 ipcp_poly_ctx_values_pool
.release ();
4583 ipcp_agg_lattice_pool
.release ();
4584 ipa_unregister_cgraph_hooks ();
4585 ipa_refdesc_pool
.release ();
4589 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4590 longer needed after indirect inlining. */
4593 ipa_free_all_structures_after_iinln (void)
4595 ipa_free_all_edge_args ();
4596 ipa_free_all_node_params ();
4597 ipa_unregister_cgraph_hooks ();
4598 ipcp_sources_pool
.release ();
4599 ipcp_cst_values_pool
.release ();
4600 ipcp_poly_ctx_values_pool
.release ();
4601 ipcp_agg_lattice_pool
.release ();
4602 ipa_refdesc_pool
.release ();
4605 /* Print ipa_tree_map data structures of all functions in the
4609 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
4612 class ipa_node_params
*info
;
4614 if (!node
->definition
)
4616 info
= ipa_node_params_sum
->get (node
);
4617 fprintf (f
, " function %s parameter descriptors:\n", node
->dump_name ());
4620 fprintf (f
, " no params return\n");
4623 count
= ipa_get_param_count (info
);
4624 for (i
= 0; i
< count
; i
++)
4629 ipa_dump_param (f
, info
, i
);
4630 if (ipa_is_param_used (info
, i
))
4631 fprintf (f
, " used");
4632 if (ipa_is_param_used_by_ipa_predicates (info
, i
))
4633 fprintf (f
, " used_by_ipa_predicates");
4634 if (ipa_is_param_used_by_indirect_call (info
, i
))
4635 fprintf (f
, " used_by_indirect_call");
4636 if (ipa_is_param_used_by_polymorphic_call (info
, i
))
4637 fprintf (f
, " used_by_polymorphic_call");
4638 c
= ipa_get_controlled_uses (info
, i
);
4639 if (c
== IPA_UNDESCRIBED_USE
)
4640 fprintf (f
, " undescribed_use");
4642 fprintf (f
, " controlled_uses=%i %s", c
,
4643 ipa_get_param_load_dereferenced (info
, i
)
4644 ? "(load_dereferenced)" : "");
4649 /* Print ipa_tree_map data structures of all functions in the
4653 ipa_print_all_params (FILE * f
)
4655 struct cgraph_node
*node
;
4657 fprintf (f
, "\nFunction parameters:\n");
4658 FOR_EACH_FUNCTION (node
)
4659 ipa_print_node_params (f
, node
);
4662 /* Stream out jump function JUMP_FUNC to OB. */
4665 ipa_write_jump_function (struct output_block
*ob
,
4666 struct ipa_jump_func
*jump_func
)
4668 struct ipa_agg_jf_item
*item
;
4669 struct bitpack_d bp
;
4673 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4674 as well as WPA memory by handling them specially. */
4675 if (jump_func
->type
== IPA_JF_CONST
4676 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4679 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4680 switch (jump_func
->type
)
4682 case IPA_JF_UNKNOWN
:
4686 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4687 stream_write_tree (ob
,
4689 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4690 : jump_func
->value
.constant
.value
, true);
4692 case IPA_JF_PASS_THROUGH
:
4693 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4694 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4696 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4697 bp
= bitpack_create (ob
->main_stream
);
4698 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4699 gcc_assert (!jump_func
->value
.pass_through
.refdesc_decremented
);
4700 streamer_write_bitpack (&bp
);
4702 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4704 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4707 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4708 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4711 case IPA_JF_ANCESTOR
:
4712 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4713 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4714 bp
= bitpack_create (ob
->main_stream
);
4715 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4716 bp_pack_value (&bp
, jump_func
->value
.ancestor
.keep_null
, 1);
4717 streamer_write_bitpack (&bp
);
4720 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4723 count
= vec_safe_length (jump_func
->agg
.items
);
4724 streamer_write_uhwi (ob
, count
);
4727 bp
= bitpack_create (ob
->main_stream
);
4728 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4729 streamer_write_bitpack (&bp
);
4732 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4734 stream_write_tree (ob
, item
->type
, true);
4735 streamer_write_uhwi (ob
, item
->offset
);
4736 streamer_write_uhwi (ob
, item
->jftype
);
4737 switch (item
->jftype
)
4739 case IPA_JF_UNKNOWN
:
4742 stream_write_tree (ob
, item
->value
.constant
, true);
4744 case IPA_JF_PASS_THROUGH
:
4745 case IPA_JF_LOAD_AGG
:
4746 streamer_write_uhwi (ob
, item
->value
.pass_through
.operation
);
4747 streamer_write_uhwi (ob
, item
->value
.pass_through
.formal_id
);
4748 if (TREE_CODE_CLASS (item
->value
.pass_through
.operation
)
4750 stream_write_tree (ob
, item
->value
.pass_through
.operand
, true);
4751 if (item
->jftype
== IPA_JF_LOAD_AGG
)
4753 stream_write_tree (ob
, item
->value
.load_agg
.type
, true);
4754 streamer_write_uhwi (ob
, item
->value
.load_agg
.offset
);
4755 bp
= bitpack_create (ob
->main_stream
);
4756 bp_pack_value (&bp
, item
->value
.load_agg
.by_ref
, 1);
4757 streamer_write_bitpack (&bp
);
4761 fatal_error (UNKNOWN_LOCATION
,
4762 "invalid jump function in LTO stream");
4766 bp
= bitpack_create (ob
->main_stream
);
4767 if (jump_func
->m_vr
)
4768 jump_func
->m_vr
->streamer_write (ob
);
4771 bp_pack_value (&bp
, false, 1);
4772 streamer_write_bitpack (&bp
);
4776 /* Read in jump function JUMP_FUNC from IB. */
4779 ipa_read_jump_function (class lto_input_block
*ib
,
4780 struct ipa_jump_func
*jump_func
,
4781 struct cgraph_edge
*cs
,
4782 class data_in
*data_in
,
4785 enum jump_func_type jftype
;
4786 enum tree_code operation
;
4788 int val
= streamer_read_uhwi (ib
);
4789 bool flag
= val
& 1;
4791 jftype
= (enum jump_func_type
) (val
/ 2);
4794 case IPA_JF_UNKNOWN
:
4795 ipa_set_jf_unknown (jump_func
);
4799 tree t
= stream_read_tree (ib
, data_in
);
4800 if (flag
&& prevails
)
4801 t
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (t
)), t
);
4802 ipa_set_jf_constant (jump_func
, t
, cs
);
4805 case IPA_JF_PASS_THROUGH
:
4806 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4807 if (operation
== NOP_EXPR
)
4809 int formal_id
= streamer_read_uhwi (ib
);
4810 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4811 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4812 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4814 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4816 int formal_id
= streamer_read_uhwi (ib
);
4817 ipa_set_jf_unary_pass_through (jump_func
, formal_id
, operation
);
4821 tree operand
= stream_read_tree (ib
, data_in
);
4822 int formal_id
= streamer_read_uhwi (ib
);
4823 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4827 case IPA_JF_ANCESTOR
:
4829 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4830 int formal_id
= streamer_read_uhwi (ib
);
4831 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4832 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4833 bool keep_null
= bp_unpack_value (&bp
, 1);
4834 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
,
4839 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4842 count
= streamer_read_uhwi (ib
);
4845 jump_func
->agg
.items
= NULL
;
4846 vec_safe_reserve (jump_func
->agg
.items
, count
, true);
4850 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4851 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4853 for (i
= 0; i
< count
; i
++)
4855 struct ipa_agg_jf_item item
;
4856 item
.type
= stream_read_tree (ib
, data_in
);
4857 item
.offset
= streamer_read_uhwi (ib
);
4858 item
.jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4860 switch (item
.jftype
)
4862 case IPA_JF_UNKNOWN
:
4865 item
.value
.constant
= stream_read_tree (ib
, data_in
);
4867 case IPA_JF_PASS_THROUGH
:
4868 case IPA_JF_LOAD_AGG
:
4869 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4870 item
.value
.pass_through
.operation
= operation
;
4871 item
.value
.pass_through
.formal_id
= streamer_read_uhwi (ib
);
4872 if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4873 item
.value
.pass_through
.operand
= NULL_TREE
;
4875 item
.value
.pass_through
.operand
= stream_read_tree (ib
, data_in
);
4876 if (item
.jftype
== IPA_JF_LOAD_AGG
)
4878 struct bitpack_d bp
;
4879 item
.value
.load_agg
.type
= stream_read_tree (ib
, data_in
);
4880 item
.value
.load_agg
.offset
= streamer_read_uhwi (ib
);
4881 bp
= streamer_read_bitpack (ib
);
4882 item
.value
.load_agg
.by_ref
= bp_unpack_value (&bp
, 1);
4886 fatal_error (UNKNOWN_LOCATION
,
4887 "invalid jump function in LTO stream");
4890 jump_func
->agg
.items
->quick_push (item
);
4894 vr
.streamer_read (ib
, data_in
);
4898 ipa_set_jfunc_vr (jump_func
, vr
);
4901 jump_func
->m_vr
= NULL
;
4904 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4905 relevant to indirect inlining to OB. */
4908 ipa_write_indirect_edge_info (struct output_block
*ob
,
4909 struct cgraph_edge
*cs
)
4911 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4912 struct bitpack_d bp
;
4914 streamer_write_hwi (ob
, ii
->param_index
);
4915 bp
= bitpack_create (ob
->main_stream
);
4916 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4917 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4918 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4919 bp_pack_value (&bp
, ii
->by_ref
, 1);
4920 bp_pack_value (&bp
, ii
->guaranteed_unmodified
, 1);
4921 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4922 streamer_write_bitpack (&bp
);
4923 if (ii
->agg_contents
|| ii
->polymorphic
)
4924 streamer_write_hwi (ob
, ii
->offset
);
4926 gcc_assert (ii
->offset
== 0);
4928 if (ii
->polymorphic
)
4930 streamer_write_hwi (ob
, ii
->otr_token
);
4931 stream_write_tree (ob
, ii
->otr_type
, true);
4932 ii
->context
.stream_out (ob
);
4936 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4937 relevant to indirect inlining from IB. */
4940 ipa_read_indirect_edge_info (class lto_input_block
*ib
,
4941 class data_in
*data_in
,
4942 struct cgraph_edge
*cs
,
4943 class ipa_node_params
*info
)
4945 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4946 struct bitpack_d bp
;
4948 ii
->param_index
= (int) streamer_read_hwi (ib
);
4949 bp
= streamer_read_bitpack (ib
);
4950 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4951 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4952 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4953 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4954 ii
->guaranteed_unmodified
= bp_unpack_value (&bp
, 1);
4955 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4956 if (ii
->agg_contents
|| ii
->polymorphic
)
4957 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4960 if (ii
->polymorphic
)
4962 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4963 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4964 ii
->context
.stream_in (ib
, data_in
);
4966 if (info
&& ii
->param_index
>= 0)
4968 if (ii
->polymorphic
)
4969 ipa_set_param_used_by_polymorphic_call (info
,
4970 ii
->param_index
, true);
4971 ipa_set_param_used_by_indirect_call (info
,
4972 ii
->param_index
, true);
4976 /* Stream out NODE info to OB. */
4979 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4982 lto_symtab_encoder_t encoder
;
4983 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
4985 struct cgraph_edge
*e
;
4986 struct bitpack_d bp
;
4988 encoder
= ob
->decl_state
->symtab_node_encoder
;
4989 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4990 streamer_write_uhwi (ob
, node_ref
);
4992 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4993 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4994 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4995 bp
= bitpack_create (ob
->main_stream
);
4996 gcc_assert (info
->analysis_done
4997 || ipa_get_param_count (info
) == 0);
4998 gcc_assert (!info
->node_enqueued
);
4999 gcc_assert (!info
->ipcp_orig_node
);
5000 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5002 /* TODO: We could just not stream the bit in the undescribed case. */
5003 bool d
= (ipa_get_controlled_uses (info
, j
) != IPA_UNDESCRIBED_USE
)
5004 ? ipa_get_param_load_dereferenced (info
, j
) : true;
5005 bp_pack_value (&bp
, d
, 1);
5006 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
5008 streamer_write_bitpack (&bp
);
5009 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5011 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
5012 stream_write_tree (ob
, ipa_get_type (info
, j
), true);
5014 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5016 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5020 streamer_write_uhwi (ob
, 0);
5024 streamer_write_uhwi (ob
,
5025 ipa_get_cs_argument_count (args
) * 2
5026 + (args
->polymorphic_call_contexts
!= NULL
));
5027 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5029 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5030 if (args
->polymorphic_call_contexts
!= NULL
)
5031 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5034 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5036 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5038 streamer_write_uhwi (ob
, 0);
5041 streamer_write_uhwi (ob
,
5042 ipa_get_cs_argument_count (args
) * 2
5043 + (args
->polymorphic_call_contexts
!= NULL
));
5044 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5046 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5047 if (args
->polymorphic_call_contexts
!= NULL
)
5048 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5051 ipa_write_indirect_edge_info (ob
, e
);
5055 /* Stream in edge E from IB. */
5058 ipa_read_edge_info (class lto_input_block
*ib
,
5059 class data_in
*data_in
,
5060 struct cgraph_edge
*e
, bool prevails
)
5062 int count
= streamer_read_uhwi (ib
);
5063 bool contexts_computed
= count
& 1;
5069 && (e
->possibly_call_in_translation_unit_p ()
5070 /* Also stream in jump functions to builtins in hope that they
5071 will get fnspecs. */
5072 || fndecl_built_in_p (e
->callee
->decl
, BUILT_IN_NORMAL
)))
5074 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (e
);
5075 vec_safe_grow_cleared (args
->jump_functions
, count
, true);
5076 if (contexts_computed
)
5077 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
, true);
5078 for (int k
= 0; k
< count
; k
++)
5080 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
5082 if (contexts_computed
)
5083 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in
5089 for (int k
= 0; k
< count
; k
++)
5091 struct ipa_jump_func dummy
;
5092 ipa_read_jump_function (ib
, &dummy
, e
,
5094 if (contexts_computed
)
5096 class ipa_polymorphic_call_context ctx
;
5097 ctx
.stream_in (ib
, data_in
);
5103 /* Stream in NODE info from IB. */
5106 ipa_read_node_info (class lto_input_block
*ib
, struct cgraph_node
*node
,
5107 class data_in
*data_in
)
5110 struct cgraph_edge
*e
;
5111 struct bitpack_d bp
;
5112 bool prevails
= node
->prevailing_p ();
5113 ipa_node_params
*info
5114 = prevails
? ipa_node_params_sum
->get_create (node
) : NULL
;
5116 int param_count
= streamer_read_uhwi (ib
);
5119 ipa_alloc_node_params (node
, param_count
);
5120 for (k
= 0; k
< param_count
; k
++)
5121 (*info
->descriptors
)[k
].move_cost
= streamer_read_uhwi (ib
);
5122 if (ipa_get_param_count (info
) != 0)
5123 info
->analysis_done
= true;
5124 info
->node_enqueued
= false;
5127 for (k
= 0; k
< param_count
; k
++)
5128 streamer_read_uhwi (ib
);
5130 bp
= streamer_read_bitpack (ib
);
5131 for (k
= 0; k
< param_count
; k
++)
5133 bool load_dereferenced
= bp_unpack_value (&bp
, 1);
5134 bool used
= bp_unpack_value (&bp
, 1);
5138 ipa_set_param_load_dereferenced (info
, k
, load_dereferenced
);
5139 ipa_set_param_used (info
, k
, used
);
5142 for (k
= 0; k
< param_count
; k
++)
5144 int nuses
= streamer_read_hwi (ib
);
5145 tree type
= stream_read_tree (ib
, data_in
);
5149 ipa_set_controlled_uses (info
, k
, nuses
);
5150 (*info
->descriptors
)[k
].decl_or_type
= type
;
5153 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5154 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5155 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5157 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5158 ipa_read_indirect_edge_info (ib
, data_in
, e
, info
);
5162 /* Write jump functions for nodes in SET. */
5165 ipa_prop_write_jump_functions (void)
5167 struct output_block
*ob
;
5168 unsigned int count
= 0;
5169 lto_symtab_encoder_iterator lsei
;
5170 lto_symtab_encoder_t encoder
;
5172 if (!ipa_node_params_sum
|| !ipa_edge_args_sum
)
5175 ob
= create_output_block (LTO_section_jump_functions
);
5176 encoder
= ob
->decl_state
->symtab_node_encoder
;
5178 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5179 lsei_next_function_in_partition (&lsei
))
5181 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5182 if (node
->has_gimple_body_p ()
5183 && ipa_node_params_sum
->get (node
) != NULL
)
5187 streamer_write_uhwi (ob
, count
);
5189 /* Process all of the functions. */
5190 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5191 lsei_next_function_in_partition (&lsei
))
5193 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5194 if (node
->has_gimple_body_p ()
5195 && ipa_node_params_sum
->get (node
) != NULL
)
5196 ipa_write_node_info (ob
, node
);
5198 streamer_write_char_stream (ob
->main_stream
, 0);
5199 produce_asm (ob
, NULL
);
5200 destroy_output_block (ob
);
5203 /* Read section in file FILE_DATA of length LEN with data DATA. */
5206 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
5209 const struct lto_function_header
*header
=
5210 (const struct lto_function_header
*) data
;
5211 const int cfg_offset
= sizeof (struct lto_function_header
);
5212 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5213 const int string_offset
= main_offset
+ header
->main_size
;
5214 class data_in
*data_in
;
5218 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5219 header
->main_size
, file_data
);
5222 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5223 header
->string_size
, vNULL
);
5224 count
= streamer_read_uhwi (&ib_main
);
5226 for (i
= 0; i
< count
; i
++)
5229 struct cgraph_node
*node
;
5230 lto_symtab_encoder_t encoder
;
5232 index
= streamer_read_uhwi (&ib_main
);
5233 encoder
= file_data
->symtab_node_encoder
;
5234 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5236 gcc_assert (node
->definition
);
5237 ipa_read_node_info (&ib_main
, node
, data_in
);
5239 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5241 lto_data_in_delete (data_in
);
5244 /* Read ipcp jump functions. */
5247 ipa_prop_read_jump_functions (void)
5249 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5250 struct lto_file_decl_data
*file_data
;
5253 ipa_check_create_node_params ();
5254 ipa_check_create_edge_args ();
5255 ipa_register_cgraph_hooks ();
5257 while ((file_data
= file_data_vec
[j
++]))
5261 = lto_get_summary_section_data (file_data
, LTO_section_jump_functions
,
5264 ipa_prop_read_section (file_data
, data
, len
);
5268 /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5271 useful_ipcp_transformation_info_p (ipcp_transformation
*ts
)
5275 if (!vec_safe_is_empty (ts
->m_agg_values
)
5276 || !vec_safe_is_empty (ts
->m_vr
))
5281 /* Write into OB IPA-CP transfromation summary TS describing NODE. */
5284 write_ipcp_transformation_info (output_block
*ob
, cgraph_node
*node
,
5285 ipcp_transformation
*ts
)
5287 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
5288 int node_ref
= lto_symtab_encoder_encode (encoder
, node
);
5289 streamer_write_uhwi (ob
, node_ref
);
5291 streamer_write_uhwi (ob
, vec_safe_length (ts
->m_agg_values
));
5292 for (const ipa_argagg_value
&av
: ts
->m_agg_values
)
5294 struct bitpack_d bp
;
5296 stream_write_tree (ob
, av
.value
, true);
5297 streamer_write_uhwi (ob
, av
.unit_offset
);
5298 streamer_write_uhwi (ob
, av
.index
);
5300 bp
= bitpack_create (ob
->main_stream
);
5301 bp_pack_value (&bp
, av
.by_ref
, 1);
5302 streamer_write_bitpack (&bp
);
5305 streamer_write_uhwi (ob
, vec_safe_length (ts
->m_vr
));
5306 for (const ipa_vr
&parm_vr
: ts
->m_vr
)
5307 parm_vr
.streamer_write (ob
);
5310 /* Stream in the aggregate value replacement chain for NODE from IB. */
5313 read_ipcp_transformation_info (lto_input_block
*ib
, cgraph_node
*node
,
5316 unsigned int count
, i
;
5317 ipcp_transformation_initialize ();
5318 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
5320 count
= streamer_read_uhwi (ib
);
5323 vec_safe_grow_cleared (ts
->m_agg_values
, count
, true);
5324 for (i
= 0; i
<count
; i
++)
5326 ipa_argagg_value
*av
= &(*ts
->m_agg_values
)[i
];;
5328 av
->value
= stream_read_tree (ib
, data_in
);
5329 av
->unit_offset
= streamer_read_uhwi (ib
);
5330 av
->index
= streamer_read_uhwi (ib
);
5332 bitpack_d bp
= streamer_read_bitpack (ib
);
5333 av
->by_ref
= bp_unpack_value (&bp
, 1);
5337 count
= streamer_read_uhwi (ib
);
5340 vec_safe_grow_cleared (ts
->m_vr
, count
, true);
5341 for (i
= 0; i
< count
; i
++)
5344 parm_vr
= &(*ts
->m_vr
)[i
];
5345 parm_vr
->streamer_read (ib
, data_in
);
5350 /* Write all aggregate replacement for nodes in set. */
5353 ipcp_write_transformation_summaries (void)
5355 struct output_block
*ob
;
5356 unsigned int count
= 0;
5357 lto_symtab_encoder_t encoder
;
5359 ob
= create_output_block (LTO_section_ipcp_transform
);
5360 encoder
= ob
->decl_state
->symtab_node_encoder
;
5363 for (int i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
5365 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
5366 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
5369 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5370 if (useful_ipcp_transformation_info_p (ts
)
5371 && lto_symtab_encoder_encode_body_p (encoder
, cnode
))
5375 streamer_write_uhwi (ob
, count
);
5377 for (int i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
5379 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
5380 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
5383 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5384 if (useful_ipcp_transformation_info_p (ts
)
5385 && lto_symtab_encoder_encode_body_p (encoder
, cnode
))
5386 write_ipcp_transformation_info (ob
, cnode
, ts
);
5388 streamer_write_char_stream (ob
->main_stream
, 0);
5389 produce_asm (ob
, NULL
);
5390 destroy_output_block (ob
);
5393 /* Read replacements section in file FILE_DATA of length LEN with data
5397 read_replacements_section (struct lto_file_decl_data
*file_data
,
5401 const struct lto_function_header
*header
=
5402 (const struct lto_function_header
*) data
;
5403 const int cfg_offset
= sizeof (struct lto_function_header
);
5404 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5405 const int string_offset
= main_offset
+ header
->main_size
;
5406 class data_in
*data_in
;
5410 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5411 header
->main_size
, file_data
);
5413 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5414 header
->string_size
, vNULL
);
5415 count
= streamer_read_uhwi (&ib_main
);
5417 for (i
= 0; i
< count
; i
++)
5420 struct cgraph_node
*node
;
5421 lto_symtab_encoder_t encoder
;
5423 index
= streamer_read_uhwi (&ib_main
);
5424 encoder
= file_data
->symtab_node_encoder
;
5425 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5427 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
5429 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5431 lto_data_in_delete (data_in
);
5434 /* Read IPA-CP aggregate replacements. */
5437 ipcp_read_transformation_summaries (void)
5439 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5440 struct lto_file_decl_data
*file_data
;
5443 while ((file_data
= file_data_vec
[j
++]))
5447 = lto_get_summary_section_data (file_data
, LTO_section_ipcp_transform
,
5450 read_replacements_section (file_data
, data
, len
);
5454 /* Adjust the aggregate replacements in TS to reflect any parameter removals
5455 which might have already taken place. If after adjustments there are no
5456 aggregate replacements left, the m_agg_values will be set to NULL. In other
5457 cases, it may be shrunk. */
5460 adjust_agg_replacement_values (cgraph_node
*node
, ipcp_transformation
*ts
)
5462 clone_info
*cinfo
= clone_info::get (node
);
5463 if (!cinfo
|| !cinfo
->param_adjustments
)
5466 auto_vec
<int, 16> new_indices
;
5467 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5468 bool removed_item
= false;
5469 unsigned dst_index
= 0;
5470 unsigned count
= ts
->m_agg_values
->length ();
5471 for (unsigned i
= 0; i
< count
; i
++)
5473 ipa_argagg_value
*v
= &(*ts
->m_agg_values
)[i
];
5474 gcc_checking_assert (v
->index
>= 0);
5477 if ((unsigned) v
->index
< new_indices
.length ())
5478 new_idx
= new_indices
[v
->index
];
5484 (*ts
->m_agg_values
)[dst_index
] = *v
;
5488 removed_item
= true;
5493 ggc_free (ts
->m_agg_values
);
5494 ts
->m_agg_values
= NULL
;
5496 else if (removed_item
)
5497 ts
->m_agg_values
->truncate (dst_index
);
5502 /* Dominator walker driving the ipcp modification phase. */
5504 class ipcp_modif_dom_walker
: public dom_walker
5507 ipcp_modif_dom_walker (struct ipa_func_body_info
*fbi
,
5508 vec
<ipa_param_descriptor
, va_gc
> *descs
,
5509 ipcp_transformation
*ts
, bool *sc
)
5510 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
5511 m_ts (ts
), m_something_changed (sc
) {}
5513 edge
before_dom_children (basic_block
) final override
;
5515 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup
); }
5518 struct ipa_func_body_info
*m_fbi
;
5519 vec
<ipa_param_descriptor
, va_gc
> *m_descriptors
;
5520 ipcp_transformation
*m_ts
;
5521 bool *m_something_changed
;
5522 auto_bitmap m_need_eh_cleanup
;
5526 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
5528 gimple_stmt_iterator gsi
;
5529 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5531 gimple
*stmt
= gsi_stmt (gsi
);
5533 HOST_WIDE_INT bit_offset
;
5538 if (!gimple_assign_load_p (stmt
))
5540 rhs
= gimple_assign_rhs1 (stmt
);
5541 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5546 while (handled_component_p (t
))
5548 /* V_C_E can do things like convert an array of integers to one
5549 bigger integer and similar things we do not handle below. */
5550 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
5555 t
= TREE_OPERAND (t
, 0);
5560 if (!ipa_load_from_parm_agg (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5561 &bit_offset
, &size
, &by_ref
))
5563 unsigned unit_offset
= bit_offset
/ BITS_PER_UNIT
;
5564 ipa_argagg_value_list
avl (m_ts
);
5565 tree v
= avl
.get_value (index
, unit_offset
, by_ref
);
5568 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
))), size
))
5571 gcc_checking_assert (is_gimple_ip_invariant (v
));
5572 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
)))
5574 if (fold_convertible_p (TREE_TYPE (rhs
), v
))
5575 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
);
5576 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5577 == TYPE_SIZE (TREE_TYPE (v
)))
5578 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
);
5583 fprintf (dump_file
, " const ");
5584 print_generic_expr (dump_file
, v
);
5585 fprintf (dump_file
, " can't be converted to type of ");
5586 print_generic_expr (dump_file
, rhs
);
5587 fprintf (dump_file
, "\n");
5595 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5597 fprintf (dump_file
, "Modifying stmt:\n ");
5598 print_gimple_stmt (dump_file
, stmt
, 0);
5600 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5603 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5605 fprintf (dump_file
, "into:\n ");
5606 print_gimple_stmt (dump_file
, stmt
, 0);
5607 fprintf (dump_file
, "\n");
5610 *m_something_changed
= true;
5611 if (maybe_clean_eh_stmt (stmt
))
5612 bitmap_set_bit (m_need_eh_cleanup
, bb
->index
);
5617 /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
5618 - whether passed by reference or not is given by BY_REF - return that
5619 constant. Otherwise return NULL_TREE. */
5622 ipcp_get_aggregate_const (struct function
*func
, tree parm
, bool by_ref
,
5623 HOST_WIDE_INT bit_offset
, HOST_WIDE_INT bit_size
)
5625 cgraph_node
*node
= cgraph_node::get (func
->decl
);
5626 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5628 if (!ts
|| !ts
->m_agg_values
)
5631 int index
= ts
->get_param_index (func
->decl
, parm
);
5635 ipa_argagg_value_list
avl (ts
);
5636 unsigned unit_offset
= bit_offset
/ BITS_PER_UNIT
;
5637 tree v
= avl
.get_value (index
, unit_offset
, by_ref
);
5639 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
))), bit_size
))
5645 /* Return true if we have recorded VALUE and MASK about PARM.
5646 Set VALUE and MASk accordingly. */
5649 ipcp_get_parm_bits (tree parm
, tree
*value
, widest_int
*mask
)
5651 cgraph_node
*cnode
= cgraph_node::get (current_function_decl
);
5652 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5654 || vec_safe_length (ts
->m_vr
) == 0
5655 || !irange::supports_p (TREE_TYPE (parm
)))
5658 int i
= ts
->get_param_index (current_function_decl
, parm
);
5661 clone_info
*cinfo
= clone_info::get (cnode
);
5662 if (cinfo
&& cinfo
->param_adjustments
)
5664 i
= cinfo
->param_adjustments
->get_original_index (i
);
5669 vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5670 if (!vr
[i
].known_p ())
5673 vr
[i
].get_vrange (tmp
);
5674 if (tmp
.undefined_p () || tmp
.varying_p ())
5676 irange
&r
= as_a
<irange
> (tmp
);
5677 irange_bitmask bm
= r
.get_bitmask ();
5678 *mask
= widest_int::from (bm
.mask (), TYPE_SIGN (TREE_TYPE (parm
)));
5679 *value
= wide_int_to_tree (TREE_TYPE (parm
), bm
.value ());
5683 /* Update value range of formal parameters of NODE as described in TS. */
5686 ipcp_update_vr (struct cgraph_node
*node
, ipcp_transformation
*ts
)
5688 if (vec_safe_is_empty (ts
->m_vr
))
5690 const vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5691 unsigned count
= vr
.length ();
5695 auto_vec
<int, 16> new_indices
;
5696 bool need_remapping
= false;
5697 clone_info
*cinfo
= clone_info::get (node
);
5698 if (cinfo
&& cinfo
->param_adjustments
)
5700 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5701 need_remapping
= true;
5703 auto_vec
<tree
, 16> parm_decls
;
5704 push_function_arg_decls (&parm_decls
, node
->decl
);
5706 for (unsigned i
= 0; i
< count
; ++i
)
5712 if (i
>= new_indices
.length ())
5714 remapped_idx
= new_indices
[i
];
5715 if (remapped_idx
< 0)
5721 parm
= parm_decls
[remapped_idx
];
5723 gcc_checking_assert (parm
);
5724 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5726 if (!ddef
|| !is_gimple_reg (parm
))
5729 if (vr
[i
].known_p ())
5732 vr
[i
].get_vrange (tmp
);
5734 if (!tmp
.undefined_p () && !tmp
.varying_p ())
5738 fprintf (dump_file
, "Setting value range of param %u "
5739 "(now %i) ", i
, remapped_idx
);
5740 tmp
.dump (dump_file
);
5741 fprintf (dump_file
, "]\n");
5743 set_range_info (ddef
, tmp
);
5745 if (POINTER_TYPE_P (TREE_TYPE (parm
))
5746 && opt_for_fn (node
->decl
, flag_ipa_bit_cp
))
5748 irange
&r
= as_a
<irange
> (tmp
);
5749 irange_bitmask bm
= r
.get_bitmask ();
5750 unsigned tem
= bm
.mask ().to_uhwi ();
5751 unsigned HOST_WIDE_INT bitpos
= bm
.value ().to_uhwi ();
5752 unsigned align
= tem
& -tem
;
5753 unsigned misalign
= bitpos
& (align
- 1);
5760 "Adjusting mask for param %u to ", i
);
5761 print_hex (bm
.mask (), dump_file
);
5762 fprintf (dump_file
, "\n");
5767 "Adjusting align: %u, misalign: %u\n",
5770 unsigned old_align
, old_misalign
;
5771 struct ptr_info_def
*pi
= get_ptr_info (ddef
);
5772 bool old_known
= get_ptr_info_alignment (pi
, &old_align
,
5775 if (old_known
&& old_align
> align
)
5780 "But alignment was already %u.\n",
5782 if ((old_misalign
& (align
- 1)) != misalign
)
5784 "old_misalign (%u) and misalign "
5786 old_misalign
, misalign
);
5793 && ((misalign
& (old_align
- 1)) != old_misalign
))
5795 "old_misalign (%u) and misalign (%u) "
5797 old_misalign
, misalign
);
5799 set_ptr_info_alignment (pi
, align
, misalign
);
5802 else if (dump_file
&& INTEGRAL_TYPE_P (TREE_TYPE (parm
)))
5804 irange
&r
= as_a
<irange
> (tmp
);
5805 irange_bitmask bm
= r
.get_bitmask ();
5806 unsigned prec
= TYPE_PRECISION (TREE_TYPE (parm
));
5807 if (wi::ne_p (bm
.mask (), wi::shwi (-1, prec
)))
5810 "Adjusting mask for param %u to ", i
);
5811 print_hex (bm
.mask (), dump_file
);
5812 fprintf (dump_file
, "\n");
5820 /* IPCP transformation phase doing propagation of aggregate values. */
5823 ipcp_transform_function (struct cgraph_node
*node
)
5825 struct ipa_func_body_info fbi
;
5828 gcc_checking_assert (cfun
);
5829 gcc_checking_assert (current_function_decl
);
5832 fprintf (dump_file
, "Modification phase of node %s\n",
5833 node
->dump_name ());
5835 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5837 || (vec_safe_is_empty (ts
->m_agg_values
)
5838 && vec_safe_is_empty (ts
->m_vr
)))
5841 ts
->maybe_create_parm_idx_map (cfun
->decl
);
5842 ipcp_update_vr (node
, ts
);
5843 if (vec_safe_is_empty (ts
->m_agg_values
))
5845 param_count
= count_formal_params (node
->decl
);
5846 if (param_count
== 0)
5849 adjust_agg_replacement_values (node
, ts
);
5850 if (vec_safe_is_empty (ts
->m_agg_values
))
5853 fprintf (dump_file
, " All affected aggregate parameters were either "
5854 "removed or converted into scalars, phase done.\n");
5859 fprintf (dump_file
, " Aggregate replacements:");
5860 ipa_argagg_value_list
avs (ts
);
5861 avs
.dump (dump_file
);
5866 fbi
.bb_infos
= vNULL
;
5867 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
5868 fbi
.param_count
= param_count
;
5869 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
5871 vec
<ipa_param_descriptor
, va_gc
> *descriptors
= NULL
;
5872 vec_safe_grow_cleared (descriptors
, param_count
, true);
5873 ipa_populate_param_decls (node
, *descriptors
);
5874 bool modified_mem_access
= false;
5875 calculate_dominance_info (CDI_DOMINATORS
);
5876 ipcp_modif_dom_walker
walker (&fbi
, descriptors
, ts
, &modified_mem_access
);
5877 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5878 free_dominance_info (CDI_DOMINATORS
);
5879 bool cfg_changed
= walker
.cleanup_eh ();
5882 struct ipa_bb_info
*bi
;
5883 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
5884 free_ipa_bb_info (bi
);
5885 fbi
.bb_infos
.release ();
5887 vec_free (descriptors
);
5889 delete_unreachable_blocks_update_callgraph (node
, false);
5891 return modified_mem_access
? TODO_update_ssa_only_virtuals
: 0;
5895 #include "gt-ipa-prop.h"