/* Interprocedural analyses.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "value-range-storage.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
    {
      // This never gets called, except in the verification code, as
      // ipa_get_value_range() calculates the hash itself.  This
      // function is mostly here for completeness' sake.
      Value_Range vr;
      p->get_vrange (vr);
      inchash::hash hstate;
      add_vrange (vr, hstate);
      return hstate.end ();
    }
  static bool
  equal (const ipa_vr *a, const vrange *b)
    {
      return a->equal_p (*b);
    }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_vr *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_vr *p)
    {
      return p == reinterpret_cast<const ipa_vr *> (1);
    }
  static void
  mark_deleted (ipa_vr *&p)
    {
      p = reinterpret_cast<ipa_vr *> (1);
    }
};
/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
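
/* A hedged sketch of how the table above is used for hash-consing;
   ipa_get_value_range further below is the real entry point, while the
   surrounding caller code here is hypothetical:

     int_range<2> tmp (integer_type_node,
		       wi::to_wide (TYPE_MIN_VALUE (integer_type_node)),
		       wi::to_wide (TYPE_MAX_VALUE (integer_type_node)));
     ipa_vr *vr = ipa_get_value_range (tmp);

   Equal ranges hash to the same slot, so repeated queries share one
   GC-allocated ipa_vr instead of allocating duplicates.  */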
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
ipa_vr::ipa_vr ()
  : m_storage (NULL),
    m_type (NULL)
{
}

ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}

bool
ipa_vr::equal_p (const ipa_vr &o) const
{
  if (!known_p ())
    return !o.known_p ();

  if (!types_compatible_p (m_type, o.m_type))
    return false;

  Value_Range r;
  o.get_vrange (r);
  return m_storage->equal_p (r);
}

void
ipa_vr::get_vrange (Value_Range &r) const
{
  r.set_type (m_type);
  m_storage->get_vrange (r, m_type);
}

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}

void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
	{
	  if (m_storage)
	    ggc_free (m_storage);
	  m_storage = ggc_alloc_vrange_storage (vr);
	}
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}

void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}

void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}

// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}
/* Analysis summary of function call return value.  */
struct GTY(()) ipa_return_value_summary
{
  /* Known value range.
     This needs to be wrapped in a structure due to the specific way
     we allocate ipa_vr.  */
  ipa_vr *vr;
};

/* Function summary for return values.  */
class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
{
public:
  ipa_return_value_sum_t (symbol_table *table, bool ggc):
    function_summary <ipa_return_value_summary *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *,
		  cgraph_node *,
		  ipa_return_value_summary *data,
		  ipa_return_value_summary *data2) final override
  {
    *data2 = *data;
  }
};

/* Variable holding the return value summary.  */
static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print VAL which is extracted from a jump function to F.  */

static void
ipa_print_constant_value (FILE *f, tree val)
{
  print_generic_expr (f, val);

  /* This is in keeping with values_equal_for_ipcp_p.  */
  if (TREE_CODE (val) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
	  || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
	      && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
    {
      fprintf (f, " -> ");
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  fprintf (f, "CONST: ");
	  ipa_print_constant_value (f, jump_func->value.constant.value);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.refdesc_decremented)
	    fprintf (f, ", refdesc_decremented");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.keep_null)
	    fprintf (f, ", keep_null");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  ipa_print_constant_value (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->m_vr)
	{
	  jump_func->m_vr->dump (f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}
/* Set JFUNC to be a know-really-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (VAR_P (TREE_OPERAND (constant, 0))
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}
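
/* For illustration, a hypothetical caller (not from this file) and the jump
   functions the setters above would record for its four arguments:

     void caller (int a, struct S *s)
     {
       callee (a,	     simple pass-through of formal 0
	       a + 4,	     arithmetic pass-through: formal 0, PLUS_EXPR, 4
	       7,	     constant jump function
	       &s->inner);   ancestor: formal 1 plus offset of member inner
     }  */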
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
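
/* To make the three sections above concrete, a hypothetical C++ hierarchy
   and the (pseudo-code) shape of its constructor:

     struct B { virtual void f (); };
     struct D : B { virtual void f (); };

     D::D ()
     {
       B::B (this);		    1) construct ancestor sub-objects first
       this->_vptr = &vtable-of-D;  2) store the new VMT pointers
       this->f ();		    3) only now user code may call virtuals
     }  */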
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
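
/* For example (hypothetical GIMPLE), given the statement

     a.0_2 = a;

   where `a' is the first formal parameter, is not a gimple register, and is
   not modified before this load, the function above returns 0.  */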
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We can not propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find original pointer.  Initialize RET to the pointer which results from
   the walk.
   If offset is known return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
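
/* For example (a hypothetical GIMPLE fragment), starting from q_4 the walk
   above arrives at p_2(D) with a known unit offset of 20:

     s_3 = p_2(D) + 16;
     q_4 = s_3 + 4;  */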
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
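
/* For illustration, the record shape the predicate above looks for is the
   usual lowering of a C++ pointer to member function (field names below are
   hypothetical):

     struct
     {
       void (S::*pfn) ();	 pointer to METHOD_TYPE
       ptrdiff_t delta;		 this-pointer adjustment
     };  */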
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
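
/* For instance, for a hypothetical caller fragment

     s.a = 1;
     s.b = c_3(D) + 2;
     foo (&s);

   the list would hold two entries sorted by ascending offset: a known
   constant for s.a and an arithmetic pass-through (PLUS_EXPR of formal `c'
   and 2) for s.b.  */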
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
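
/* E.g. with hypothetical LIST entries covering bit ranges [0, 32) and
   [64, 96), an ITEM at [48, 80) overlaps the second entry and is reported
   as clobbered, whereas an ITEM at [32, 48) is not.  */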
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	     tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an operation
	     with one operand, here we only allow tc_unary operation to avoid
	     possible problem.  Then we can use (opclass == tc_unary) or not to
	     distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
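
/* For instance (a hypothetical store), for `*p_3(D) = 7' with BASE being
   p_3(D) and CHECK_REF true, CONTENT is filled with offset 0, the size and
   type of the store, and a known constant value of 7.  */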
2083 /* Traverse statements from CALL backwards, scanning whether an aggregate given
2084 in ARG is filled in constants or values that are derived from caller's
2085 formal parameter in the way described by some kinds of jump functions. FBI
2086 is the context of the caller function for interprocedural analysis. ARG can
2087 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2088 the type of the aggregate, JFUNC is the jump function for the aggregate. */
2091 determine_known_aggregate_parts (struct ipa_func_body_info
*fbi
,
2092 gcall
*call
, tree arg
,
2094 struct ipa_jump_func
*jfunc
)
2096 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
2097 bitmap visited
= NULL
;
2098 int item_count
= 0, value_count
= 0;
2099 HOST_WIDE_INT arg_offset
, arg_size
;
2101 bool check_ref
, by_ref
;
2103 int max_agg_items
= opt_for_fn (fbi
->node
->decl
, param_ipa_max_agg_items
);
2105 if (max_agg_items
== 0)
2108 /* The function operates in three stages. First, we prepare check_ref, r,
2109 arg_base and arg_offset based on what is actually passed as an actual
2112 if (POINTER_TYPE_P (arg_type
))
2115 if (TREE_CODE (arg
) == SSA_NAME
)
2118 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
2119 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
2124 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
2125 arg_size
= tree_to_uhwi (type_size
);
2126 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
2128 else if (TREE_CODE (arg
) == ADDR_EXPR
)
2132 arg
= TREE_OPERAND (arg
, 0);
2133 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2134 &arg_size
, &reverse
);
2137 if (DECL_P (arg_base
))
2140 ao_ref_init (&r
, arg_base
);
2152 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
2156 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2157 &arg_size
, &reverse
);
2161 ao_ref_init (&r
, arg
);
2164 /* Second stage traverses virtual SSA web backwards starting from the call
2165 statement, only looks at individual dominating virtual operand (its
2166 definition dominates the call), as long as it is confident that content
2167 of the aggregate is affected by definition of the virtual operand, it
2168 builds a sorted linked list of ipa_agg_jf_list describing that. */
  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content)
	      /* Since IPA-CP stores results with unsigned int offsets, we can
		 discard those which would not fit now before we stream them to
		 WPA.  */
	      && (content->offset + content->size - arg_offset
		  <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}

      dom_vuse = gimple_vuse (stmt);
    }

  BITMAP_FREE (visited);
  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t && t != void_list_node)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return a pointer to an ipa_vr just like TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  */
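/* Illustrative use (hypothetical values, not from the GCC sources):

     int_range<2> tmp (build_int_cst (integer_type_node, 1),
		       build_int_cst (integer_type_node, 8));
     ipa_vr *vr1 = ipa_get_value_range (tmp);
     ipa_vr *vr2 = ipa_get_value_range (tmp);
     gcc_checking_assert (vr1 == vr2);

   Equal ranges share a single GC-allocated copy, which keeps the many
   identical ranges carried by jump functions cheap.  */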
static ipa_vr *
ipa_get_value_range (const vrange &tmp)
{
  inchash::hash hstate;
  inchash::add_vrange (tmp, hstate);
  hashval_t hash = hstate.end ();
  ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
  if (*slot)
    return *slot;

  ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
  *slot = vr;
  return vr;
}
/* Assign to JF a pointer to a range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
{
  Value_Range tmp;
  vr.get_vrange (tmp);
  ipa_set_jfunc_vr (jf, tmp);
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */
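/* For example (hypothetical code, not from the GCC sources), in

     void caller (int x)
     {
       callee (7, x, x + 1);
     }

   the edge to CALLEE gets three jump functions: a constant (7), a simple
   pass-through of formal parameter X, and an arithmetic pass-through
   (X plus 1).  */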
static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      Value_Range vr (TREE_TYPE (arg));
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    vr.set_nonzero (TREE_TYPE (arg));

	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));

	  get_pointer_alignment_1 (arg, &align, &bitpos);

	  if (align > BITS_PER_UNIT
	      && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
	    {
	      wide_int mask
		= wi::bit_and_not (wi::mask (prec, false, prec),
				   wide_int::from (align / BITS_PER_UNIT - 1,
						   prec, UNSIGNED));
	      wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
					       UNSIGNED);
	      irange_bitmask bm (value, mask);
	      if (!addr_nonzero)
		vr.set_varying (TREE_TYPE (arg));
	      irange &r = as_a <irange> (vr);
	      r.update_bitmask (bm);
	      ipa_set_jfunc_vr (jfunc, vr);
	    }
	  else if (addr_nonzero)
	    ipa_set_jfunc_vr (jfunc, vr);
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else if (param_type
	       && Value_Range::supports_type_p (TREE_TYPE (arg))
	       && Value_Range::supports_type_p (param_type)
	       && irange::supports_p (TREE_TYPE (arg))
	       && irange::supports_p (param_type)
	       && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	       && !vr.undefined_p ())
	{
	  Value_Range resvr (vr);
	  range_cast (resvr, param_type);
	  if (!resvr.undefined_p () && !resvr.varying_p ())
	    ipa_set_jfunc_vr (jfunc, resvr);
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	gcc_assert (!jfunc->m_vr);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may be better off working out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ())
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */
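/* Illustrative sketch (not from the GCC sources): with the common
   ptrmemfunc representation, a member function pointer F is a record

     struct
     {
       void (*__pfn) ();    // function pointer or vtable index
       ptrdiff_t __delta;   // this-pointer adjustment
     };

   so the loads matched below look like f.__pfn or f.__delta, possibly
   lowered to MEM_REFs at offsets 0 and sizeof (void *).  */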
static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, fld, ptr_field, delta_field;
  tree ref_field = NULL_TREE;
  tree ref_offset = NULL_TREE;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }

  if (TREE_CODE (rhs) == MEM_REF)
    {
      ref_offset = TREE_OPERAND (rhs, 1);
      if (ref_field && integer_nonzerop (ref_offset))
	return NULL_TREE;
    }
  else if (!ref_field)
    return NULL_TREE;

  if (TREE_CODE (rhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (rhs, 0)))
    {
      rhs = TREE_OPERAND (rhs, 0);
      if (TREE_CODE (SSA_NAME_VAR (rhs)) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (TREE_TYPE (rhs)), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (rhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR)
	rhs = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (TREE_CODE (rhs) != PARM_DECL
	  || !type_like_member_ptr_p (TREE_TYPE (rhs), &ptr_field,
				      &delta_field))
	return NULL_TREE;
    }

  fld = use_delta ? delta_field : ptr_field;
  if (ref_field)
    {
      if (ref_field != fld)
	return NULL_TREE;
    }
  else if (!tree_int_cst_equal (byte_position (fld), ref_offset))
    return NULL_TREE;

  if (offset_p)
    *offset_p = int_bit_position (fld);
  return rhs;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic
   call.  */
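/* For instance (hypothetical code, not from the GCC sources), in

     void caller (void (*cb) (int))
     {
       cb (5);
     }

   the call through CB is noted as an indirect call through parameter 0,
   which is what later allows IPA-CP to turn the edge into a direct one
   once it learns which function CB points to.  */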
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  Look up the documentation of macro
   TARGET_PTRMEMFUNC_VBIT_LOCATION for details.  An example of such a pattern
   is the gimple dump below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>:
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_succ (virt_bb) != join)
    return;

  if (single_pred (virt_bb) != bb)
    {
      /* In cases when the distinction between a normal and a virtual
	 function is encoded in the delta field, the load of the
	 actual non-virtual function pointer can be in its own BB.  */
      if (!single_pred_p (bb) || !single_succ_p (bb))
	return;
      bb = single_pred (bb);
      if (bb != single_pred (virt_bb))
	return;
    }

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
  if (!branch)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  if (TREE_CODE (rec) == SSA_NAME)
    {
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (rec));
      if (index < 0
	  || !parm_ref_data_preserved_p (fbi, index, call,
					 gimple_assign_rhs1 (def)))
	return;
      by_ref = true;
    }
  else
    {
      index = ipa_get_param_decl_index (info, rec);
      if (index < 0
	  || !parm_preserved_before_stmt_p (fbi, index, call, rec))
	return;
      by_ref = false;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, false);
  cs->indirect_info->offset = offset;
  cs->indirect_info->agg_contents = 1;
  cs->indirect_info->member_ptr = 1;
  cs->indirect_info->by_ref = by_ref;
  cs->indirect_info->guaranteed_unmodified = 1;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL, examining whether and how it utilizes formal
   parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to a
   vector containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Return true if EXPR is a load from a dereference of SSA_NAME NAME.  */

static bool
load_from_dereferenced_name (tree expr, tree name)
{
  tree base = get_base_address (expr);
  return (TREE_CODE (base) == MEM_REF
	  && TREE_OPERAND (base, 0) == name);
}
/* Calculate controlled uses of parameters of NODE.  */
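/* For illustration (hypothetical code, not from the GCC sources):

     void callee (int *p)
     {
       use (p);                      // passed straight to a call: recognized
       int x = *p;                   // load through P: recognized, and the
				     // parameter is load_dereferenced
       intptr_t v = (intptr_t) p;    // anything else: undescribed
     }

   Any use that cannot be described makes the count IPA_UNDESCRIBED_USE;
   otherwise the parameter gets a concrete count of call uses which IPA-CP
   can later rely on when removing references.  */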
static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      gimple *stmt;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
		{
		  if (is_gimple_debug (stmt))
		    continue;

		  int all_stmt_uses = 0;
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
		    all_stmt_uses++;

		  if (is_gimple_call (stmt))
		    {
		      if (gimple_call_internal_p (stmt))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      int recognized_stmt_uses;
		      if (gimple_call_fn (stmt) == ddef)
			recognized_stmt_uses = 1;
		      else
			recognized_stmt_uses = 0;
		      unsigned arg_count = gimple_call_num_args (stmt);
		      for (unsigned i = 0; i < arg_count; i++)
			{
			  tree arg = gimple_call_arg (stmt, i);
			  if (arg == ddef)
			    recognized_stmt_uses++;
			  else if (load_from_dereferenced_name (arg, ddef))
			    {
			      load_dereferenced = true;
			      recognized_stmt_uses++;
			    }
			}

		      if (recognized_stmt_uses != all_stmt_uses)
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      else
			call_uses += all_stmt_uses;
		    }
		  else if (gimple_assign_single_p (stmt))
		    {
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (all_stmt_uses != 1
			  || !load_from_dereferenced_name (rhs, ddef))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      load_dereferenced = true;
		    }
		  else
		    {
		      call_uses = IPA_UNDESCRIBED_USE;
		      break;
		    }
		}
	    }
	}
      else
	call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node)
      || (count_formal_params (node->decl)
	  >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
    {
      gcc_assert (!ipa_get_param_count (info));
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  enable_ranger (cfun, false);
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  disable_ranger (cfun);

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */
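/* For illustration (hypothetical chain, not from the GCC sources): if A
   calls B passing A's parameter 0 straight through, and B calls C passing
   B's parameter 0 straight through, then after inlining B into A the edge
   from A to C must describe C's argument directly in terms of A's
   parameters, so the two pass-throughs are composed into one.  When they
   cannot be composed (operations involved, variadic argument-count
   mismatches), the result conservatively becomes IPA_JF_UNKNOWN.  */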
static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
	    {
	      int dst_fid;
	      struct ipa_jump_func *src;

	      if (item->jftype != IPA_JF_PASS_THROUGH
		  && item->jftype != IPA_JF_LOAD_AGG)
		continue;

	      dst_fid = item->value.pass_through.formal_id;
	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
		{
		  item->jftype = IPA_JF_UNKNOWN;
		  continue;
		}

	      item->value.pass_through.formal_id = -1;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      if (src->type == IPA_JF_CONST)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      && item->value.pass_through.operation == NOP_EXPR)
		    {
		      item->jftype = IPA_JF_CONST;
		      item->value.constant = src->value.constant.value;
		      continue;
		    }
		}
	      else if (src->type == IPA_JF_PASS_THROUGH
		       && src->value.pass_through.operation == NOP_EXPR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      || !item->value.load_agg.by_ref
		      || src->value.pass_through.agg_preserved)
		    item->value.pass_through.formal_id
		      = src->value.pass_through.formal_id;
		}
	      else if (src->type == IPA_JF_ANCESTOR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH)
		    {
		      if (!src->value.ancestor.offset)
			item->value.pass_through.formal_id
			  = src->value.ancestor.formal_id;
		    }
		  else if (src->value.ancestor.agg_preserved)
		    {
		      gcc_checking_assert (item->value.load_agg.by_ref);

		      item->value.pass_through.formal_id
			= src->value.ancestor.formal_id;
		      item->value.load_agg.offset
			+= src->value.ancestor.offset;
		    }
		}

	      if (item->value.pass_through.formal_id < 0)
		item->jftype = IPA_JF_UNKNOWN;
	    }
	}

      if (!top)
	{
	  ipa_set_jf_unknown (dst);
	  continue;
	}

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  class ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     do not.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      class ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count, true);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Parameter and argument in ancestor jump function must be pointer
	     type, which means access to aggregate must be by-reference.  */
	  gcc_assert (!src->agg.items || src->agg.by_ref);

	  if (src->agg.items && dst->value.ancestor.agg_preserved)
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (top && dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      class ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  class ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count, true);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  {
		    bool rd
		      = ipa_get_jf_pass_through_refdesc_decremented (dst);
		    ipa_set_jf_cst_copy (dst, src);
		    if (rd)
		      ipa_zap_jf_refdesc (dst);
		  }
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id,
						     operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p,
					 ipa_get_jf_ancestor_keep_null (src));
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */
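/* For example (hypothetical, not from the GCC sources): once IPA-CP
   discovers that a function-pointer argument is always &foo, the indirect
   edge for a call like "ptr (x)" can be redirected here into a direct (or,
   with SPECULATIVE, a speculative) edge to FOO, which in turn makes the
   call a candidate for inlining.  */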
struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				 "discovered direct call non-invariant %s\n",
				 ie->caller->dump_name ());
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			     "discovered direct call to non-function in %s, "
			     "making it __builtin_unreachable\n",
			     ie->caller->dump_name ());

	  target = builtin_decl_unreachable ();
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the
     unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We had better ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
	{
	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
	  if (!e2)
	    fprintf (dump_file, "ipa-prop: Discovered call to a "
		     "speculative target (%s -> %s) but the call is "
		     "already speculated to different target.  "
		     "Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	  else
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agrees with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		     "converting indirect call in %s to direct call to %s\n",
		     ie->caller->dump_name (), callee->dump_name ());
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */
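/* For illustration (hypothetical code, not from the GCC sources): given

     static int (*const table[2]) (int) = { foo, bar };

   a request with REQ_OFFSET equal to the bit offset of element 1 would
   return the ADDR_EXPR of BAR, allowing an indirect call through this
   read-only table to be resolved.  */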
static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    return NULL;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    return NULL;
	  elt_offset = int_bit_position (index);
	}

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
   is none.  BY_REF specifies whether the value has to be passed by reference
   or by value.  */

static tree
ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
				   ipa_node_params *src_info,
				   cgraph_node *src_node,
				   HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref != agg_jfunc->by_ref)
    return NULL_TREE;

  for (const ipa_agg_jf_item &item : agg_jfunc->items)
    if (item.offset == offset)
      return ipa_agg_value_from_jfunc (src_info, src_node, &item);

  return NULL_TREE;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid, IPA_REF_ADDR);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static symtab_node *
symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
	  && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
    return NULL;

  return symtab_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target = NULL_TREE;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      if (scalar)
	target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
					     ie->indirect_info->by_ref);
      if (!target && ie->indirect_info->guaranteed_unmodified)
	target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
						    new_root,
						    ie->indirect_info->offset,
						    ie->indirect_info->by_ref);
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_unreachable ();
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t;
      if (jfunc->type == IPA_JF_CONST)
	t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
					ie->indirect_info->offset, true);
      else
	t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
					       new_root,
					       ie->indirect_info->offset, true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
					BUILT_IN_UNREACHABLE_TRAP)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	    (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
	     ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index, true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */
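/* Roughly speaking (illustration only): if the address of a constant had C
   described uses in the caller and D described uses in the callee, the
   argument that connected them accounts for one use on each side, so after
   inlining the combined count is C + D - 1; an undescribed count on either
   side keeps the result IPA_UNDESCRIBED_USE.  */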
static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH
	  && !ipa_get_jf_pass_through_refdesc_decremented (jf))
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0,
						      IPA_REF_ADDR)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && VAR_P (TREE_OPERAND (cst, 0)))
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || VAR_P (TREE_OPERAND (cst, 0))));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
        {
          struct ipa_jump_func *jf;
          int i;
          FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
            if (jf->type == IPA_JF_CONST
                && ipa_get_jf_constant_rdesc (jf))
              {
                ok = false;
                break;
              }
        }
      if (ok)
        ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
         ipa_edge_args_sum_t (symtab, true));
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
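/* Note the allocation idiom above: the summary lives in GC memory obtained
   with ggc_alloc_no_dtor and is constructed in place with placement new,

     new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
       ipa_edge_args_sum_t (symtab, true);

   so no destructor is registered with the collector; the object is instead
   torn down explicitly with ggc_delete (see ipa_free_all_edge_args).  */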
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              vec<ipa_argagg_value, va_gc> *aggs)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->m_agg_values = aggs;
}
/* Hook that is called by cgraph.cc when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
        {
          struct ipa_cst_ref_desc *rdesc;
          try_decrement_rdesc_refcount (jf);
          if (jf->type == IPA_JF_CONST
              && (rdesc = ipa_get_jf_constant_rdesc (jf))
              && rdesc->cs == cs)
            rdesc->cs = NULL;
        }
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
                                ipa_edge_args *old_args,
                                ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

          if (!src_rdesc)
            dst_jf->value.constant.rdesc = NULL;
          else if (src->caller == dst->caller)
            {
              /* Creation of a speculative edge.  If the source edge is the one
                 grabbing a reference, we must create a new (duplicate)
                 reference description.  Otherwise they refer to the same
                 description corresponding to a reference taken in a function
                 src->caller is inlined to.  In that case we just must
                 increment the refcount.  */
              if (src_rdesc->cs == src)
                {
                  symtab_node *n = symtab_node_for_jfunc (src_jf);
                  gcc_checking_assert (n);
                  ipa_ref *ref
                    = src->caller->find_reference (n, src->call_stmt,
                                                   src->lto_stmt_uid,
                                                   IPA_REF_ADDR);
                  gcc_checking_assert (ref);
                  dst->caller->clone_reference (ref, ref->stmt);

                  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
                  dst_rdesc->cs = dst;
                  dst_rdesc->refcount = src_rdesc->refcount;
                  dst_rdesc->next_duplicate = NULL;
                  dst_jf->value.constant.rdesc = dst_rdesc;
                }
              else
                {
                  src_rdesc->refcount++;
                  dst_jf->value.constant.rdesc = src_rdesc;
                }
            }
          else if (src_rdesc->cs == src)
            {
              struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
              src_rdesc->next_duplicate = dst_rdesc;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              /* This can happen during inlining, when a JFUNC can refer to a
                 reference taken in a function up in the tree of inline clones.
                 We need to find the duplicate that refers to our tree of
                 inline clones.  */

              gcc_assert (dst->caller->inlined_to);
              for (dst_rdesc = src_rdesc->next_duplicate;
                   dst_rdesc;
                   dst_rdesc = dst_rdesc->next_duplicate)
                {
                  struct cgraph_node *top;
                  top = dst_rdesc->cs->caller->inlined_to
                    ? dst_rdesc->cs->caller->inlined_to
                    : dst_rdesc->cs->caller;
                  if (dst->caller->inlined_to == top)
                    break;
                }
              gcc_assert (dst_rdesc);
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
               && src->caller == dst->caller)
        {
          struct cgraph_node *inline_root = dst->caller->inlined_to
            ? dst->caller->inlined_to : dst->caller;
          ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
          int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

          int c = ipa_get_controlled_uses (root_info, idx);
          if (c != IPA_UNDESCRIBED_USE)
            {
              c++;
              ipa_set_controlled_uses (root_info, idx, c);
            }
        }
    }
}
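/* To recap the refdesc handling above: when duplicating an IPA_JF_CONST with
   a usable reference description, either (1) a speculative edge is created in
   the same caller and the description is duplicated or shared depending on
   which edge grabbed the reference, (2) the source edge owned the reference
   and the new description is chained in as its duplicate, or (3) the
   description is owned higher up in the tree of inline clones and the
   duplicate belonging to our tree is looked up on the next_duplicate
   chain.  */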
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *, cgraph_node *,
                              ipa_node_params *old_info,
                              ipa_node_params *new_info)
{
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  gcc_assert (new_info->lattices.is_empty ());
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;
}
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
                                  ipcp_transformation *src_trans,
                                  ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder
    = symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they are there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
        fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
        fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
        fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, "  controlled_uses=%i %s", c,
                 ipa_get_param_load_dereferenced (info, i)
                 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;
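  /* The jump function type and the ADDR_EXPR flag are packed into a single
     uhwi below as type * 2 + flag; ipa_read_jump_function recovers them with
     flag = val & 1 and type = val / 2.  */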
  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
                         flag
                         ? TREE_OPERAND (jump_func->value.constant.value, 0)
                         : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
          streamer_write_bitpack (&bp);
        }
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
               == tcc_unary)
        streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
        {
        case IPA_JF_UNKNOWN:
          break;
        case IPA_JF_CONST:
          stream_write_tree (ob, item->value.constant, true);
          break;
        case IPA_JF_PASS_THROUGH:
        case IPA_JF_LOAD_AGG:
          streamer_write_uhwi (ob, item->value.pass_through.operation);
          streamer_write_uhwi (ob, item->value.pass_through.formal_id);
          if (TREE_CODE_CLASS (item->value.pass_through.operation)
              != tcc_unary)
            stream_write_tree (ob, item->value.pass_through.operand, true);
          if (item->jftype == IPA_JF_LOAD_AGG)
            {
              stream_write_tree (ob, item->value.load_agg.type, true);
              streamer_write_uhwi (ob, item->value.load_agg.offset);
              bp = bitpack_create (ob->main_stream);
              bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
              streamer_write_bitpack (&bp);
            }
          break;
        default:
          fatal_error (UNKNOWN_LOCATION,
                       "invalid jump function in LTO stream");
        }
    }

  bp = bitpack_create (ob->main_stream);
  if (jump_func->m_vr)
    jump_func->m_vr->streamer_write (ob);
  else
    {
      bp_pack_value (&bp, false, 1);
      streamer_write_bitpack (&bp);
    }
}
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        class data_in *data_in,
                        bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
        tree t = stream_read_tree (ib, data_in);
        if (flag && prevails)
          t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
        ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
        }
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
        {
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        bool keep_null = bp_unpack_value (&bp, 1);
        ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
                             keep_null);
        break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
        {
        case IPA_JF_UNKNOWN:
          break;
        case IPA_JF_CONST:
          item.value.constant = stream_read_tree (ib, data_in);
          break;
        case IPA_JF_PASS_THROUGH:
        case IPA_JF_LOAD_AGG:
          operation = (enum tree_code) streamer_read_uhwi (ib);
          item.value.pass_through.operation = operation;
          item.value.pass_through.formal_id = streamer_read_uhwi (ib);
          if (TREE_CODE_CLASS (operation) == tcc_unary)
            item.value.pass_through.operand = NULL_TREE;
          else
            item.value.pass_through.operand = stream_read_tree (ib, data_in);
          if (item.jftype == IPA_JF_LOAD_AGG)
            {
              struct bitpack_d bp;
              item.value.load_agg.type = stream_read_tree (ib, data_in);
              item.value.load_agg.offset = streamer_read_uhwi (ib);
              bp = streamer_read_bitpack (ib);
              item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
            }
          break;
        default:
          fatal_error (UNKNOWN_LOCATION,
                       "invalid jump function in LTO stream");
        }
      if (prevails)
        jump_func->agg.items->quick_push (item);
    }

  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
        ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
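/* Note that the bitpack layout above (polymorphic, agg_contents, member_ptr,
   by_ref, guaranteed_unmodified, vptr_changed) must be unpacked in exactly
   the same order in ipa_read_indirect_edge_info below; bitpacks carry no
   type or field information of their own.  */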
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
                             class data_in *data_in,
                             struct cgraph_edge *cs,
                             class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
        ipa_set_param_used_by_polymorphic_call (info,
                                                ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
                                           ii->param_index, true);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case.  */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
        ? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
        {
          streamer_write_uhwi (ob, 0);
          continue;
        }

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
        streamer_write_uhwi (ob, 0);
      else
        {
          streamer_write_uhwi (ob,
                               ipa_get_cs_argument_count (args) * 2
                               + (args->polymorphic_call_contexts != NULL));
          for (j = 0; j < ipa_get_cs_argument_count (args); j++)
            {
              ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
              if (args->polymorphic_call_contexts != NULL)
                ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
            }
        }
      ipa_write_indirect_edge_info (ob, e);
    }
}
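/* As with jump function types, the argument count and the presence of
   polymorphic call contexts are packed into one uhwi per edge above:
   count * 2 + has_contexts.  ipa_read_edge_info below decodes it with
   contexts_computed = count & 1 and count /= 2; a zero means the edge has no
   ipa_edge_args summary at all.  */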
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
                    class data_in *data_in,
                    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
          /* Also stream in jump functions to builtins in hope that they
             will get fnspecs.  */
          || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
        vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
        {
          ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                  data_in, prevails);
          if (contexts_computed)
            ipa_get_ith_polymorhic_call_context (args, k)->stream_in
                                                             (ib, data_in);
        }
    }
  else
    {
      for (int k = 0; k < count; k++)
        {
          struct ipa_jump_func dummy;
          ipa_read_jump_function (ib, &dummy, e,
                                  data_in, prevails);
          if (contexts_computed)
            {
              class ipa_polymorphic_call_context ctx;
              ctx.stream_in (ib, data_in);
            }
        }
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
                    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
        (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
        info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
        {
          ipa_set_param_load_dereferenced (info, k, load_dereferenced);
          ipa_set_param_used (info, k, used);
        }
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
        {
          ipa_set_controlled_uses (info, k, nuses);
          (*info->descriptors)[k].decl_or_type = type;
        }
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_node_params_sum->get (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_node_params_sum->get (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
                                        &len);
      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}
/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
   useful information.  */

static bool
useful_ipcp_transformation_info_p (ipcp_transformation *ts)
{
  if (!ts)
    return false;
  if (!vec_safe_is_empty (ts->m_agg_values)
      || !vec_safe_is_empty (ts->m_vr))
    return true;
  return false;
}
/* Write into OB IPA-CP transformation summary TS describing NODE.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
                                ipcp_transformation *ts)
{
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  int node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
  for (const ipa_argagg_value &av : ts->m_agg_values)
    {
      struct bitpack_d bp;

      stream_write_tree (ob, av.value, true);
      streamer_write_uhwi (ob, av.unit_offset);
      streamer_write_uhwi (ob, av.index);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av.by_ref, 1);
      bp_pack_value (&bp, av.killed, 1);
      streamer_write_bitpack (&bp);
    }

  streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
  for (const ipa_vr &parm_vr : ts->m_vr)
    parm_vr.streamer_write (ob);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
                               data_in *data_in)
{
  unsigned int count, i;
  ipcp_transformation_initialize ();
  ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_agg_values, count, true);
      for (i = 0; i < count; i++)
        {
          ipa_argagg_value *av = &(*ts->m_agg_values)[i];

          av->value = stream_read_tree (ib, data_in);
          av->unit_offset = streamer_read_uhwi (ib);
          av->index = streamer_read_uhwi (ib);

          bitpack_d bp = streamer_read_bitpack (ib);
          av->by_ref = bp_unpack_value (&bp, 1);
          av->killed = bp_unpack_value (&bp, 1);
        }
    }

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
        {
          ipa_vr *parm_vr;
          parm_vr = &(*ts->m_vr)[i];
          parm_vr->streamer_read (ib, data_in);
        }
    }
}
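/* Note that the reader above consumes the two sub-streams (aggregate values,
   then parameter value ranges), including their leading counts, in exactly
   the order in which write_ipcp_transformation_info produces them.  */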
/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      if (!cnode)
        continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
          && lto_symtab_encoder_encode_body_p (encoder, cnode))
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
      if (!cnode)
        continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
          && lto_symtab_encoder_encode_body_p (encoder, cnode))
        write_ipcp_transformation_info (ob, cnode, ts);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
                                        &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in TS to reflect any parameter removals
   which might have already taken place.  If after adjustments there are no
   aggregate replacements left, the m_agg_values will be set to NULL.  In other
   cases, it may be shrunk.  */

static void
adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
{
  clone_info *cinfo = clone_info::get (node);
  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  bool removed_item = false;
  unsigned dst_index = 0;
  unsigned count = ts->m_agg_values->length ();
  for (unsigned i = 0; i < count; i++)
    {
      ipa_argagg_value *v = &(*ts->m_agg_values)[i];
      gcc_checking_assert (v->index >= 0);

      int new_idx = -1;
      if ((unsigned) v->index < new_indices.length ())
        new_idx = new_indices[v->index];

      if (new_idx >= 0)
        {
          v->index = new_idx;
          if (removed_item)
            (*ts->m_agg_values)[dst_index] = *v;
          dst_index++;
        }
      else
        removed_item = true;
    }

  if (dst_index == 0)
    {
      ggc_free (ts->m_agg_values);
      ts->m_agg_values = NULL;
    }
  else if (removed_item)
    ts->m_agg_values->truncate (dst_index);
}
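/* For illustration, if parameters 0 and 2 survived cloning but parameter 1
   was removed, get_updated_indices yields new_indices = {0, -1, 1}:
   replacements for parameter 0 keep index 0, those for parameter 2 are
   remapped to index 1, and those for parameter 1 are dropped, compacting
   m_agg_values in place.  */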
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor, va_gc> *descs,
                         ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  bool cleanup_eh ()
    { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  ipcp_transformation *m_ts;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &bit_offset, &size, &by_ref))
        continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      if (!v
          || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
        bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
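/* The effect of the walker is, roughly, that a load such as

     x_1 = param_2(D)->field;

   is rewritten to x_1 = 42; when the transformation summary records the
   constant 42 for that parameter at the offset of 'field' (with a NOP_EXPR
   or VIEW_CONVERT_EXPR inserted when the recorded type does not match the
   type of the loaded value).  */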
/* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
   - whether passed by reference or not is given by BY_REF - return that
   constant.  Otherwise return NULL_TREE.  This is supposed to be used only
   after clone materialization and transformation is done (because it asserts
   that killed constants have been pruned).  */

tree
ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
                          HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
{
  cgraph_node *node = cgraph_node::get (func->decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || !ts->m_agg_values)
    return NULL_TREE;

  int index = ts->get_param_index (func->decl, parm);
  if (index < 0)
    return NULL_TREE;

  ipa_argagg_value_list avl (ts);
  unsigned unit_offset = bit_offset / BITS_PER_UNIT;
  const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
  if (!av || av->by_ref != by_ref)
    return NULL_TREE;
  gcc_assert (!av->killed);
  tree v = av->value;
  if (!v
      || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
    return NULL_TREE;

  return v;
}
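/* A (hypothetical) caller that wants to know whether the four bytes at
   offset 8 of a by-reference aggregate parameter PARM are a known constant
   would do

     tree c = ipcp_get_aggregate_const (fun, parm, true, 64, 32);

   and get either the recorded invariant or NULL_TREE; offsets and sizes are
   in bits.  */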
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts
      || vec_safe_length (ts->m_vr) == 0
      || !irange::supports_p (TREE_TYPE (parm)))
    return false;

  int i = ts->get_param_index (current_function_decl, parm);
  if (i < 0)
    return false;
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
        return false;
    }

  vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  if (!vr[i].known_p ())
    return false;
  Value_Range tmp;
  vr[i].get_vrange (tmp);
  if (tmp.undefined_p () || tmp.varying_p ())
    return false;
  irange &r = as_a<irange> (tmp);
  irange_bitmask bm = r.get_bitmask ();
  *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
  *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
  return true;
}
/* Update value range of formal parameters of NODE as described in TS.  */

static void
ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->m_vr))
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec<tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
        {
          if (i >= new_indices.length ())
            continue;
          remapped_idx = new_indices[i];
          if (remapped_idx < 0)
            continue;
        }
      else
        remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known_p ())
        {
          Value_Range tmp;
          vr[i].get_vrange (tmp);

          if (!tmp.undefined_p () && !tmp.varying_p ())
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u "
                           "(now %i) ", i, remapped_idx);
                  tmp.dump (dump_file);
                  fprintf (dump_file, "]\n");
                }
              set_range_info (ddef, tmp);

              if (POINTER_TYPE_P (TREE_TYPE (parm))
                  && opt_for_fn (node->decl, flag_ipa_bit_cp))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
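                  /* In an irange_bitmask, mask bits set to 1 are unknown and
                     value holds the known bits, so the lowest unknown bit
                     bounds the provable alignment of the pointer: e.g. mask
                     0xfff8 with value 0x4 means the low three bits are known
                     to be 100b, i.e. align 8, misalign 4, which is what the
                     tem & -tem and bitpos & (align - 1) computations below
                     extract.  */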
                  unsigned tem = bm.mask ().to_uhwi ();
                  unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
                  unsigned align = tem & -tem;
                  unsigned misalign = bitpos & (align - 1);

                  if (align > 1)
                    {
                      if (dump_file)
                        {
                          fprintf (dump_file,
                                   "Adjusting mask for param %u to ", i);
                          print_hex (bm.mask (), dump_file);
                          fprintf (dump_file, "\n");
                        }

                      if (dump_file)
                        fprintf (dump_file,
                                 "Adjusting align: %u, misalign: %u\n",
                                 align, misalign);

                      unsigned old_align, old_misalign;
                      struct ptr_info_def *pi = get_ptr_info (ddef);
                      bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                               &old_misalign);

                      if (old_known && old_align > align)
                        {
                          if (dump_file)
                            {
                              fprintf (dump_file,
                                       "But alignment was already %u.\n",
                                       old_align);
                              if ((old_misalign & (align - 1)) != misalign)
                                fprintf (dump_file,
                                         "old_misalign (%u) and misalign "
                                         "(%u) mismatch\n",
                                         old_misalign, misalign);
                            }
                          continue;
                        }

                      if (dump_file
                          && old_known
                          && ((misalign & (old_align - 1)) != old_misalign))
                        fprintf (dump_file,
                                 "old_misalign (%u) and misalign (%u) "
                                 "mismatch\n",
                                 old_misalign, misalign);

                      set_ptr_info_alignment (pi, align, misalign);
                    }
                }
              else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
                {
                  irange &r = as_a<irange> (tmp);
                  irange_bitmask bm = r.get_bitmask ();
                  unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
                  if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
                    {
                      fprintf (dump_file,
                               "Adjusting mask for param %u to ", i);
                      print_hex (bm.mask (), dump_file);
                      fprintf (dump_file, "\n");
                    }
                }
            }
        }
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
          && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
        fprintf (dump_file, "  All affected aggregate parameters were either "
                 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "     Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
/* Record that current function return value range is VAL.  */

void
ipa_record_return_value_range (Value_Range val)
{
  cgraph_node *n = cgraph_node::get (current_function_decl);
  if (!ipa_return_value_sum)
    {
      if (!ipa_vr_hash_table)
        ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
      ipa_return_value_sum = new (ggc_alloc_no_dtor<ipa_return_value_sum_t> ())
        ipa_return_value_sum_t (symtab, true);
      ipa_return_value_sum->disable_insertion_hook ();
    }
  ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording return range ");
      val.dump (dump_file);
      fprintf (dump_file, "\n");
    }
}
/* Return true if value range of DECL is known and if so initialize RANGE.  */

bool
ipa_return_value_range (Value_Range &range, tree decl)
{
  cgraph_node *n = cgraph_node::get (decl);
  if (!n || !ipa_return_value_sum)
    return false;
  enum availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (n->decl != decl
      && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
    return false;
  ipa_return_value_summary *v = ipa_return_value_sum->get (n);
  if (!v)
    return false;
  v->vr->get_vrange (range);
  return true;
}
/* Reset all state within ipa-prop.cc so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
ipa_prop_cc_finalize (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;

  if (ipa_edge_args_sum)
    ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;

  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}
/* Return true if the two pass_through components of two jump functions are
   known to be equivalent.  AGG_JF denotes whether they are part of aggregate
   functions or not.  The function can be used before the IPA phase of IPA-CP
   or inlining because it cannot cope with refdesc changes these passes can
   carry out.  */

static bool
ipa_agg_pass_through_jf_equivalent_p (ipa_pass_through_data *ipt1,
                                      ipa_pass_through_data *ipt2,
                                      bool agg_jf)
{
  gcc_assert (agg_jf ||
              (!ipt1->refdesc_decremented && !ipt2->refdesc_decremented));
  if (ipt1->operation != ipt2->operation
      || ipt1->formal_id != ipt2->formal_id
      || (!agg_jf && (ipt1->agg_preserved != ipt2->agg_preserved)))
    return false;
  if (((ipt1->operand != NULL_TREE) != (ipt2->operand != NULL_TREE))
      || (ipt1->operand
          && !values_equal_for_ipcp_p (ipt1->operand, ipt2->operand)))
    return false;
  return true;
}
/* Return true if the two aggregate jump functions are known to be equivalent.
   The function can be used before the IPA phase of IPA-CP or inlining because
   it cannot cope with refdesc changes these passes can carry out.  */

static bool
ipa_agg_jump_functions_equivalent_p (ipa_agg_jf_item *ajf1,
                                     ipa_agg_jf_item *ajf2)
{
  if (ajf1->offset != ajf2->offset
      || ajf1->jftype != ajf2->jftype
      || !types_compatible_p (ajf1->type, ajf2->type))
    return false;

  switch (ajf1->jftype)
    {
    case IPA_JF_CONST:
      if (!values_equal_for_ipcp_p (ajf1->value.constant,
                                    ajf2->value.constant))
        return false;
      break;
    case IPA_JF_PASS_THROUGH:
      {
        ipa_pass_through_data *ipt1 = &ajf1->value.pass_through;
        ipa_pass_through_data *ipt2 = &ajf2->value.pass_through;
        if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, true))
          return false;
      }
      break;
    case IPA_JF_LOAD_AGG:
      {
        ipa_load_agg_data *ila1 = &ajf1->value.load_agg;
        ipa_load_agg_data *ila2 = &ajf2->value.load_agg;
        if (!ipa_agg_pass_through_jf_equivalent_p (&ila1->pass_through,
                                                   &ila2->pass_through, true))
          return false;
        if (ila1->offset != ila2->offset
            || ila1->by_ref != ila2->by_ref
            || !types_compatible_p (ila1->type, ila2->type))
          return false;
      }
      break;
    default:
      gcc_unreachable ();
    }
  return true;
}
/* Return true if the two jump functions are known to be equivalent.  The
   function can be used before the IPA phase of IPA-CP or inlining because it
   cannot cope with refdesc changes these passes can carry out.  */

bool
ipa_jump_functions_equivalent_p (ipa_jump_func *jf1, ipa_jump_func *jf2)
{
  if (jf1->type != jf2->type)
    return false;

  switch (jf1->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      {
        tree cst1 = ipa_get_jf_constant (jf1);
        tree cst2 = ipa_get_jf_constant (jf2);
        if (!values_equal_for_ipcp_p (cst1, cst2))
          return false;

        ipa_cst_ref_desc *rd1 = jfunc_rdesc_usable (jf1);
        ipa_cst_ref_desc *rd2 = jfunc_rdesc_usable (jf2);
        if (rd1 && rd2)
          {
            gcc_assert (rd1->refcount == 1
                        && rd2->refcount == 1);
            gcc_assert (!rd1->next_duplicate && !rd2->next_duplicate);
          }
        else if (rd1 || rd2)
          return false;
      }
      break;
    case IPA_JF_PASS_THROUGH:
      {
        ipa_pass_through_data *ipt1 = &jf1->value.pass_through;
        ipa_pass_through_data *ipt2 = &jf2->value.pass_through;
        if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, false))
          return false;
      }
      break;
    case IPA_JF_ANCESTOR:
      {
        ipa_ancestor_jf_data *ia1 = &jf1->value.ancestor;
        ipa_ancestor_jf_data *ia2 = &jf2->value.ancestor;

        if (ia1->formal_id != ia2->formal_id
            || ia1->agg_preserved != ia2->agg_preserved
            || ia1->keep_null != ia2->keep_null
            || ia1->offset != ia2->offset)
          return false;
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (((jf1->m_vr != nullptr) != (jf2->m_vr != nullptr))
      || (jf1->m_vr && !jf1->m_vr->equal_p (*jf2->m_vr)))
    return false;

  unsigned alen = vec_safe_length (jf1->agg.items);
  if (vec_safe_length (jf2->agg.items) != alen)
    return false;

  if (!alen)
    return true;

  if (jf1->agg.by_ref != jf2->agg.by_ref)
    return false;

  for (unsigned i = 0 ; i < alen; i++)
    if (!ipa_agg_jump_functions_equivalent_p (&(*jf1->agg.items)[i],
                                              &(*jf2->agg.items)[i]))
      return false;

  return true;
}
6227 #include "gt-ipa-prop.h"