1 /* Interprocedural analyses.
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
59 #include "value-range-storage.h"
61 /* Function summary where the parameter infos are actually stored. */
62 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
64 function_summary
<ipcp_transformation
*> *ipcp_transformation_sum
= NULL
;
66 /* Edge summary for IPA-CP edge information. */
67 ipa_edge_args_sum_t
*ipa_edge_args_sum
;
69 /* Traits for a hash table for reusing already existing ipa_bits. */
71 struct ipa_bit_ggc_hash_traits
: public ggc_cache_remove
<ipa_bits
*>
73 typedef ipa_bits
*value_type
;
74 typedef ipa_bits
*compare_type
;
76 hash (const ipa_bits
*p
)
78 hashval_t t
= (hashval_t
) p
->value
.to_shwi ();
79 return iterative_hash_host_wide_int (p
->mask
.to_shwi (), t
);
82 equal (const ipa_bits
*a
, const ipa_bits
*b
)
84 return a
->value
== b
->value
&& a
->mask
== b
->mask
;
86 static const bool empty_zero_p
= true;
88 mark_empty (ipa_bits
*&p
)
93 is_empty (const ipa_bits
*p
)
98 is_deleted (const ipa_bits
*p
)
100 return p
== reinterpret_cast<const ipa_bits
*> (1);
103 mark_deleted (ipa_bits
*&p
)
105 p
= reinterpret_cast<ipa_bits
*> (1);
109 /* Hash table for avoid repeated allocations of equal ipa_bits. */
110 static GTY ((cache
)) hash_table
<ipa_bit_ggc_hash_traits
> *ipa_bits_hash_table
;
112 /* Traits for a hash table for reusing ranges. */
114 struct ipa_vr_ggc_hash_traits
: public ggc_cache_remove
<ipa_vr
*>
116 typedef ipa_vr
*value_type
;
117 typedef const vrange
*compare_type
;
119 hash (const ipa_vr
*p
)
121 // This never get called, except in the verification code, as
122 // ipa_get_value_range() calculates the hash itself. This
123 // function is mostly here for completness' sake.
126 inchash::hash hstate
;
127 add_vrange (vr
, hstate
);
128 return hstate
.end ();
131 equal (const ipa_vr
*a
, const vrange
*b
)
133 return a
->equal_p (*b
);
135 static const bool empty_zero_p
= true;
137 mark_empty (ipa_vr
*&p
)
142 is_empty (const ipa_vr
*p
)
147 is_deleted (const ipa_vr
*p
)
149 return p
== reinterpret_cast<const ipa_vr
*> (1);
152 mark_deleted (ipa_vr
*&p
)
154 p
= reinterpret_cast<ipa_vr
*> (1);
158 /* Hash table for avoid repeated allocations of equal ranges. */
159 static GTY ((cache
)) hash_table
<ipa_vr_ggc_hash_traits
> *ipa_vr_hash_table
;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
164 /* Description of a reference to an IPA constant. */
165 struct ipa_cst_ref_desc
167 /* Edge that corresponds to the statement which took the reference. */
168 struct cgraph_edge
*cs
;
169 /* Linked list of duplicates created when call graph edges are cloned. */
170 struct ipa_cst_ref_desc
*next_duplicate
;
171 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
172 if out of control. */
176 /* Allocation pool for reference descriptions. */
178 static object_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
179 ("IPA-PROP ref descriptions");
187 ipa_vr::ipa_vr (const vrange
&r
)
188 : m_storage (ggc_alloc_vrange_storage (r
)),
194 ipa_vr::equal_p (const vrange
&r
) const
196 gcc_checking_assert (!r
.undefined_p ());
197 return (types_compatible_p (m_type
, r
.type ()) && m_storage
->equal_p (r
));
201 ipa_vr::get_vrange (Value_Range
&r
) const
204 m_storage
->get_vrange (r
, m_type
);
208 ipa_vr::set_unknown ()
211 ggc_free (m_storage
);
217 ipa_vr::streamer_read (lto_input_block
*ib
, data_in
*data_in
)
219 struct bitpack_d bp
= streamer_read_bitpack (ib
);
220 bool known
= bp_unpack_value (&bp
, 1);
224 streamer_read_value_range (ib
, data_in
, vr
);
225 if (!m_storage
|| !m_storage
->fits_p (vr
))
228 ggc_free (m_storage
);
229 m_storage
= ggc_alloc_vrange_storage (vr
);
231 m_storage
->set_vrange (vr
);
242 ipa_vr::streamer_write (output_block
*ob
) const
244 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
245 bp_pack_value (&bp
, !!m_storage
, 1);
246 streamer_write_bitpack (&bp
);
249 Value_Range
vr (m_type
);
250 m_storage
->get_vrange (vr
, m_type
);
251 streamer_write_vrange (ob
, vr
);
256 ipa_vr::dump (FILE *out
) const
260 Value_Range
vr (m_type
);
261 m_storage
->get_vrange (vr
, m_type
);
265 fprintf (out
, "NO RANGE");
268 // These stubs are because we use an ipa_vr in a hash_traits and
269 // hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
270 // picking up the gt_ggc_mx (T *) version.
272 gt_pch_nx (ipa_vr
*&x
)
274 return gt_pch_nx ((ipa_vr
*) x
);
278 gt_ggc_mx (ipa_vr
*&x
)
280 return gt_ggc_mx ((ipa_vr
*) x
);
284 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
285 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
288 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
290 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
294 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
297 /* Return index of the formal whose tree is PTREE in function which corresponds
301 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
306 count
= vec_safe_length (descriptors
);
307 for (i
= 0; i
< count
; i
++)
308 if ((*descriptors
)[i
].decl_or_type
== ptree
)
314 /* Return index of the formal whose tree is PTREE in function which corresponds
318 ipa_get_param_decl_index (class ipa_node_params
*info
, tree ptree
)
320 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
323 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
327 ipa_populate_param_decls (struct cgraph_node
*node
,
328 vec
<ipa_param_descriptor
, va_gc
> &descriptors
)
336 gcc_assert (gimple_has_body_p (fndecl
));
337 fnargs
= DECL_ARGUMENTS (fndecl
);
339 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
341 descriptors
[param_num
].decl_or_type
= parm
;
342 unsigned int cost
= estimate_move_cost (TREE_TYPE (parm
), true);
343 descriptors
[param_num
].move_cost
= cost
;
344 /* Watch overflow, move_cost is a bitfield. */
345 gcc_checking_assert (cost
== descriptors
[param_num
].move_cost
);
350 /* Return how many formal parameters FNDECL has. */
353 count_formal_params (tree fndecl
)
357 gcc_assert (gimple_has_body_p (fndecl
));
359 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
365 /* Return the declaration of Ith formal parameter of the function corresponding
366 to INFO. Note there is no setter function as this array is built just once
367 using ipa_initialize_node_params. */
370 ipa_dump_param (FILE *file
, class ipa_node_params
*info
, int i
)
372 fprintf (file
, "param #%i", i
);
373 if ((*info
->descriptors
)[i
].decl_or_type
)
376 print_generic_expr (file
, (*info
->descriptors
)[i
].decl_or_type
);
380 /* If necessary, allocate vector of parameter descriptors in info of NODE.
381 Return true if they were allocated, false if not. */
384 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
386 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
388 if (!info
->descriptors
&& param_count
)
390 vec_safe_grow_cleared (info
->descriptors
, param_count
, true);
397 /* Initialize the ipa_node_params structure associated with NODE by counting
398 the function parameters, creating the descriptors and populating their
402 ipa_initialize_node_params (struct cgraph_node
*node
)
404 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
406 if (!info
->descriptors
407 && ipa_alloc_node_params (node
, count_formal_params (node
->decl
)))
408 ipa_populate_param_decls (node
, *info
->descriptors
);
411 /* Print the jump functions associated with call graph edge CS to file F. */
414 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
416 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
417 int count
= ipa_get_cs_argument_count (args
);
419 for (int i
= 0; i
< count
; i
++)
421 struct ipa_jump_func
*jump_func
;
422 enum jump_func_type type
;
424 jump_func
= ipa_get_ith_jump_func (args
, i
);
425 type
= jump_func
->type
;
427 fprintf (f
, " param %d: ", i
);
428 if (type
== IPA_JF_UNKNOWN
)
429 fprintf (f
, "UNKNOWN\n");
430 else if (type
== IPA_JF_CONST
)
432 tree val
= jump_func
->value
.constant
.value
;
433 fprintf (f
, "CONST: ");
434 print_generic_expr (f
, val
);
435 if (TREE_CODE (val
) == ADDR_EXPR
436 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
439 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
443 else if (type
== IPA_JF_PASS_THROUGH
)
445 fprintf (f
, "PASS THROUGH: ");
446 fprintf (f
, "%d, op %s",
447 jump_func
->value
.pass_through
.formal_id
,
448 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
449 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
452 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
454 if (jump_func
->value
.pass_through
.agg_preserved
)
455 fprintf (f
, ", agg_preserved");
456 if (jump_func
->value
.pass_through
.refdesc_decremented
)
457 fprintf (f
, ", refdesc_decremented");
460 else if (type
== IPA_JF_ANCESTOR
)
462 fprintf (f
, "ANCESTOR: ");
463 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
464 jump_func
->value
.ancestor
.formal_id
,
465 jump_func
->value
.ancestor
.offset
);
466 if (jump_func
->value
.ancestor
.agg_preserved
)
467 fprintf (f
, ", agg_preserved");
468 if (jump_func
->value
.ancestor
.keep_null
)
469 fprintf (f
, ", keep_null");
473 if (jump_func
->agg
.items
)
475 struct ipa_agg_jf_item
*item
;
478 fprintf (f
, " Aggregate passed by %s:\n",
479 jump_func
->agg
.by_ref
? "reference" : "value");
480 FOR_EACH_VEC_ELT (*jump_func
->agg
.items
, j
, item
)
482 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
484 fprintf (f
, "type: ");
485 print_generic_expr (f
, item
->type
);
487 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
488 fprintf (f
, "PASS THROUGH: %d,",
489 item
->value
.pass_through
.formal_id
);
490 else if (item
->jftype
== IPA_JF_LOAD_AGG
)
492 fprintf (f
, "LOAD AGG: %d",
493 item
->value
.pass_through
.formal_id
);
494 fprintf (f
, " [offset: " HOST_WIDE_INT_PRINT_DEC
", by %s],",
495 item
->value
.load_agg
.offset
,
496 item
->value
.load_agg
.by_ref
? "reference"
500 if (item
->jftype
== IPA_JF_PASS_THROUGH
501 || item
->jftype
== IPA_JF_LOAD_AGG
)
503 fprintf (f
, " op %s",
504 get_tree_code_name (item
->value
.pass_through
.operation
));
505 if (item
->value
.pass_through
.operation
!= NOP_EXPR
)
508 print_generic_expr (f
, item
->value
.pass_through
.operand
);
511 else if (item
->jftype
== IPA_JF_CONST
)
513 fprintf (f
, "CONST: ");
514 print_generic_expr (f
, item
->value
.constant
);
516 else if (item
->jftype
== IPA_JF_UNKNOWN
)
517 fprintf (f
, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC
" bits",
518 tree_to_uhwi (TYPE_SIZE (item
->type
)));
523 class ipa_polymorphic_call_context
*ctx
524 = ipa_get_ith_polymorhic_call_context (args
, i
);
525 if (ctx
&& !ctx
->useless_p ())
527 fprintf (f
, " Context: ");
528 ctx
->dump (dump_file
);
533 fprintf (f
, " value: ");
534 print_hex (jump_func
->bits
->value
, f
);
535 fprintf (f
, ", mask: ");
536 print_hex (jump_func
->bits
->mask
, f
);
540 fprintf (f
, " Unknown bits\n");
544 jump_func
->m_vr
->dump (f
);
548 fprintf (f
, " Unknown VR\n");
553 /* Print the jump functions of all arguments on all call graph edges going from
557 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
559 struct cgraph_edge
*cs
;
561 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
562 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
565 fprintf (f
, " callsite %s -> %s : \n",
567 cs
->callee
->dump_name ());
568 if (!ipa_edge_args_info_available_for_edge_p (cs
))
569 fprintf (f
, " no arg info\n");
571 ipa_print_node_jump_functions_for_edge (f
, cs
);
574 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
576 class cgraph_indirect_call_info
*ii
;
578 ii
= cs
->indirect_info
;
579 if (ii
->agg_contents
)
580 fprintf (f
, " indirect %s callsite, calling param %i, "
581 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
582 ii
->member_ptr
? "member ptr" : "aggregate",
583 ii
->param_index
, ii
->offset
,
584 ii
->by_ref
? "by reference" : "by_value");
586 fprintf (f
, " indirect %s callsite, calling param %i, "
587 "offset " HOST_WIDE_INT_PRINT_DEC
,
588 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
593 fprintf (f
, ", for stmt ");
594 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
599 ii
->context
.dump (f
);
600 if (!ipa_edge_args_info_available_for_edge_p (cs
))
601 fprintf (f
, " no arg info\n");
603 ipa_print_node_jump_functions_for_edge (f
, cs
);
607 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
610 ipa_print_all_jump_functions (FILE *f
)
612 struct cgraph_node
*node
;
614 fprintf (f
, "\nJump functions:\n");
615 FOR_EACH_FUNCTION (node
)
617 ipa_print_node_jump_functions (f
, node
);
621 /* Set jfunc to be a know-really nothing jump function. */
624 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
626 jfunc
->type
= IPA_JF_UNKNOWN
;
629 /* Set JFUNC to be a copy of another jmp (to be used by jump function
630 combination code). The two functions will share their rdesc. */
633 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
634 struct ipa_jump_func
*src
)
637 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
638 dst
->type
= IPA_JF_CONST
;
639 dst
->value
.constant
= src
->value
.constant
;
642 /* Set JFUNC to be a constant jmp function. */
645 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
646 struct cgraph_edge
*cs
)
648 jfunc
->type
= IPA_JF_CONST
;
649 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
651 if (TREE_CODE (constant
) == ADDR_EXPR
652 && (TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
653 || (VAR_P (TREE_OPERAND (constant
, 0))
654 && TREE_STATIC (TREE_OPERAND (constant
, 0)))))
656 struct ipa_cst_ref_desc
*rdesc
;
658 rdesc
= ipa_refdesc_pool
.allocate ();
660 rdesc
->next_duplicate
= NULL
;
662 jfunc
->value
.constant
.rdesc
= rdesc
;
665 jfunc
->value
.constant
.rdesc
= NULL
;
668 /* Set JFUNC to be a simple pass-through jump function. */
670 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
673 jfunc
->type
= IPA_JF_PASS_THROUGH
;
674 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
675 jfunc
->value
.pass_through
.formal_id
= formal_id
;
676 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
677 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
678 jfunc
->value
.pass_through
.refdesc_decremented
= false;
681 /* Set JFUNC to be an unary pass through jump function. */
684 ipa_set_jf_unary_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
685 enum tree_code operation
)
687 jfunc
->type
= IPA_JF_PASS_THROUGH
;
688 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
689 jfunc
->value
.pass_through
.formal_id
= formal_id
;
690 jfunc
->value
.pass_through
.operation
= operation
;
691 jfunc
->value
.pass_through
.agg_preserved
= false;
692 jfunc
->value
.pass_through
.refdesc_decremented
= false;
694 /* Set JFUNC to be an arithmetic pass through jump function. */
697 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
698 tree operand
, enum tree_code operation
)
700 jfunc
->type
= IPA_JF_PASS_THROUGH
;
701 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
702 jfunc
->value
.pass_through
.formal_id
= formal_id
;
703 jfunc
->value
.pass_through
.operation
= operation
;
704 jfunc
->value
.pass_through
.agg_preserved
= false;
705 jfunc
->value
.pass_through
.refdesc_decremented
= false;
708 /* Set JFUNC to be an ancestor jump function. */
711 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
712 int formal_id
, bool agg_preserved
, bool keep_null
)
714 jfunc
->type
= IPA_JF_ANCESTOR
;
715 jfunc
->value
.ancestor
.formal_id
= formal_id
;
716 jfunc
->value
.ancestor
.offset
= offset
;
717 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
718 jfunc
->value
.ancestor
.keep_null
= keep_null
;
721 /* Get IPA BB information about the given BB. FBI is the context of analyzis
722 of this function body. */
724 static struct ipa_bb_info
*
725 ipa_get_bb_info (struct ipa_func_body_info
*fbi
, basic_block bb
)
727 gcc_checking_assert (fbi
);
728 return &fbi
->bb_infos
[bb
->index
];
731 /* Structure to be passed in between detect_type_change and
732 check_stmt_for_type_change. */
734 struct prop_type_change_info
736 /* Offset into the object where there is the virtual method pointer we are
738 HOST_WIDE_INT offset
;
739 /* The declaration or SSA_NAME pointer of the base that we are checking for
742 /* Set to true if dynamic type change has been detected. */
743 bool type_maybe_changed
;
746 /* Return true if STMT can modify a virtual method table pointer.
748 This function makes special assumptions about both constructors and
749 destructors which are all the functions that are allowed to alter the VMT
750 pointers. It assumes that destructors begin with assignment into all VMT
751 pointers and that constructors essentially look in the following way:
753 1) The very first thing they do is that they call constructors of ancestor
754 sub-objects that have them.
756 2) Then VMT pointers of this and all its ancestors is set to new values
757 corresponding to the type corresponding to the constructor.
759 3) Only afterwards, other stuff such as constructor of member sub-objects
760 and the code written by the user is run. Only this may include calling
761 virtual functions, directly or indirectly.
763 There is no way to call a constructor of an ancestor sub-object in any
766 This means that we do not have to care whether constructors get the correct
767 type information because they will always change it (in fact, if we define
768 the type to be given by the VMT pointer, it is undefined).
770 The most important fact to derive from the above is that if, for some
771 statement in the section 3, we try to detect whether the dynamic type has
772 changed, we can safely ignore all calls as we examine the function body
773 backwards until we reach statements in section 2 because these calls cannot
774 be ancestor constructors or destructors (if the input is not bogus) and so
775 do not change the dynamic type (this holds true only for automatically
776 allocated objects but at the moment we devirtualize only these). We then
777 must detect that statements in section 2 change the dynamic type and can try
778 to derive the new type. That is enough and we can stop, we will never see
779 the calls into constructors of sub-objects in this code. Therefore we can
780 safely ignore all call statements that we traverse.
784 stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
786 if (is_gimple_call (stmt
))
788 if (gimple_clobber_p (stmt
))
790 else if (is_gimple_assign (stmt
))
792 tree lhs
= gimple_assign_lhs (stmt
);
794 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
796 if (flag_strict_aliasing
797 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
800 if (TREE_CODE (lhs
) == COMPONENT_REF
801 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
803 /* In the future we might want to use get_ref_base_and_extent to find
804 if there is a field corresponding to the offset and if so, proceed
805 almost like if it was a component ref. */
811 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
812 to check whether a particular statement may modify the virtual table
813 pointerIt stores its result into DATA, which points to a
814 prop_type_change_info structure. */
817 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
819 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
820 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
822 if (stmt_may_be_vtbl_ptr_store (stmt
))
824 tci
->type_maybe_changed
= true;
831 /* See if ARG is PARAM_DECl describing instance passed by pointer
832 or reference in FUNCTION. Return false if the dynamic type may change
833 in between beggining of the function until CALL is invoked.
835 Generally functions are not allowed to change type of such instances,
836 but they call destructors. We assume that methods cannot destroy the THIS
837 pointer. Also as a special cases, constructor and destructors may change
838 type of the THIS pointer. */
841 param_type_may_change_p (tree function
, tree arg
, gimple
*call
)
843 /* Pure functions cannot do any changes on the dynamic type;
844 that require writting to memory. */
845 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
847 /* We need to check if we are within inlined consturctor
848 or destructor (ideally we would have way to check that the
849 inline cdtor is actually working on ARG, but we don't have
850 easy tie on this, so punt on all non-pure cdtors.
851 We may also record the types of cdtors and once we know type
852 of the instance match them.
854 Also code unification optimizations may merge calls from
855 different blocks making return values unreliable. So
856 do nothing during late optimization. */
857 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
859 if (TREE_CODE (arg
) == SSA_NAME
860 && SSA_NAME_IS_DEFAULT_DEF (arg
)
861 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
863 /* Normal (non-THIS) argument. */
864 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
865 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
866 /* THIS pointer of an method - here we want to watch constructors
867 and destructors as those definitely may change the dynamic
869 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
870 && !DECL_CXX_CONSTRUCTOR_P (function
)
871 && !DECL_CXX_DESTRUCTOR_P (function
)
872 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
874 /* Walk the inline stack and watch out for ctors/dtors. */
875 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
876 block
= BLOCK_SUPERCONTEXT (block
))
877 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
885 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
886 callsite CALL) by looking for assignments to its virtual table pointer. If
887 it is, return true. ARG is the object itself (not a pointer
888 to it, unless dereferenced). BASE is the base of the memory access as
889 returned by get_ref_base_and_extent, as is the offset.
891 This is helper function for detect_type_change and detect_type_change_ssa
892 that does the heavy work which is usually unnecesary. */
895 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
896 tree base
, tree comp_type
, gcall
*call
,
897 HOST_WIDE_INT offset
)
899 struct prop_type_change_info tci
;
902 gcc_checking_assert (DECL_P (arg
)
903 || TREE_CODE (arg
) == MEM_REF
904 || handled_component_p (arg
));
906 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
908 /* Const calls cannot call virtual methods through VMT and so type changes do
910 if (!flag_devirtualize
|| !gimple_vuse (call
)
911 /* Be sure expected_type is polymorphic. */
913 || TREE_CODE (comp_type
) != RECORD_TYPE
914 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
915 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
918 if (fbi
->aa_walk_budget
== 0)
921 ao_ref_init (&ao
, arg
);
924 ao
.size
= POINTER_SIZE
;
925 ao
.max_size
= ao
.size
;
928 tci
.object
= get_base_address (arg
);
929 tci
.type_maybe_changed
= false;
932 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
933 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
);
935 fbi
->aa_walk_budget
-= walked
;
937 fbi
->aa_walk_budget
= 0;
939 if (walked
>= 0 && !tci
.type_maybe_changed
)
945 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
946 If it is, return true. ARG is the object itself (not a pointer
947 to it, unless dereferenced). BASE is the base of the memory access as
948 returned by get_ref_base_and_extent, as is the offset. */
951 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
952 tree comp_type
, gcall
*call
,
953 HOST_WIDE_INT offset
)
955 if (!flag_devirtualize
)
958 if (TREE_CODE (base
) == MEM_REF
959 && !param_type_may_change_p (current_function_decl
,
960 TREE_OPERAND (base
, 0),
963 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
967 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
968 SSA name (its dereference will become the base and the offset is assumed to
972 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
975 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
976 if (!flag_devirtualize
977 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
980 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
983 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
984 build_int_cst (ptr_type_node
, 0));
986 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
990 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
991 boolean variable pointed to by DATA. */
994 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
997 bool *b
= (bool *) data
;
1002 /* Find the nearest valid aa status for parameter specified by INDEX that
1005 static struct ipa_param_aa_status
*
1006 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
1011 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
1014 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
1015 if (!bi
->param_aa_statuses
.is_empty ()
1016 && bi
->param_aa_statuses
[index
].valid
)
1017 return &bi
->param_aa_statuses
[index
];
1021 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
1022 structures and/or intialize the result with a dominating description as
1025 static struct ipa_param_aa_status
*
1026 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
1029 gcc_checking_assert (fbi
);
1030 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
1031 if (bi
->param_aa_statuses
.is_empty ())
1032 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
, true);
1033 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
1036 gcc_checking_assert (!paa
->parm_modified
1037 && !paa
->ref_modified
1038 && !paa
->pt_modified
);
1039 struct ipa_param_aa_status
*dom_paa
;
1040 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
1050 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
1051 a value known not to be modified in this function before reaching the
1052 statement STMT. FBI holds information about the function we have so far
1053 gathered but do not survive the summary building stage. */
1056 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
1057 gimple
*stmt
, tree parm_load
)
1059 struct ipa_param_aa_status
*paa
;
1060 bool modified
= false;
1063 tree base
= get_base_address (parm_load
);
1064 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
1065 if (TREE_READONLY (base
))
1068 gcc_checking_assert (fbi
);
1069 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1070 if (paa
->parm_modified
|| fbi
->aa_walk_budget
== 0)
1073 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
1074 ao_ref_init (&refd
, parm_load
);
1075 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1076 &modified
, NULL
, NULL
,
1077 fbi
->aa_walk_budget
);
1081 fbi
->aa_walk_budget
= 0;
1084 fbi
->aa_walk_budget
-= walked
;
1085 if (paa
&& modified
)
1086 paa
->parm_modified
= true;
1090 /* If STMT is an assignment that loads a value from an parameter declaration,
1091 return the index of the parameter in ipa_node_params which has not been
1092 modified. Otherwise return -1. */
1095 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
1096 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1102 if (!gimple_assign_single_p (stmt
))
1105 op1
= gimple_assign_rhs1 (stmt
);
1106 if (TREE_CODE (op1
) != PARM_DECL
)
1109 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
1111 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
1117 /* Return true if memory reference REF (which must be a load through parameter
1118 with INDEX) loads data that are known to be unmodified in this function
1119 before reaching statement STMT. */
1122 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
1123 int index
, gimple
*stmt
, tree ref
)
1125 struct ipa_param_aa_status
*paa
;
1126 bool modified
= false;
1129 gcc_checking_assert (fbi
);
1130 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1131 if (paa
->ref_modified
|| fbi
->aa_walk_budget
== 0)
1134 gcc_checking_assert (gimple_vuse (stmt
));
1135 ao_ref_init (&refd
, ref
);
1136 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1137 &modified
, NULL
, NULL
,
1138 fbi
->aa_walk_budget
);
1142 fbi
->aa_walk_budget
= 0;
1145 fbi
->aa_walk_budget
-= walked
;
1147 paa
->ref_modified
= true;
1151 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1152 is known to be unmodified in this function before reaching call statement
1153 CALL into which it is passed. FBI describes the function body. */
1156 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1157 gimple
*call
, tree parm
)
1159 bool modified
= false;
1162 /* It's unnecessary to calculate anything about memory contnets for a const
1163 function because it is not goin to use it. But do not cache the result
1164 either. Also, no such calculations for non-pointers. */
1165 if (!gimple_vuse (call
)
1166 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1169 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1172 if (paa
->pt_modified
|| fbi
->aa_walk_budget
== 0)
1175 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1176 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1177 &modified
, NULL
, NULL
,
1178 fbi
->aa_walk_budget
);
1181 fbi
->aa_walk_budget
= 0;
1185 fbi
->aa_walk_budget
-= walked
;
1187 paa
->pt_modified
= true;
1191 /* Return true if we can prove that OP is a memory reference loading
1192 data from an aggregate passed as a parameter.
1194 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1195 false if it cannot prove that the value has not been modified before the
1196 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1197 if it cannot prove the value has not been modified, in that case it will
1198 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1200 INFO and PARMS_AINFO describe parameters of the current function (but the
1201 latter can be NULL), STMT is the load statement. If function returns true,
1202 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1203 within the aggregate and whether it is a load from a value passed by
1204 reference respectively.
1206 Return false if the offset divided by BITS_PER_UNIT would not fit into an
1210 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1211 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1212 gimple
*stmt
, tree op
, int *index_p
,
1213 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1214 bool *by_ref_p
, bool *guaranteed_unmodified
)
1219 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1222 || (*offset_p
/ BITS_PER_UNIT
) > UINT_MAX
)
1225 /* We can not propagate across volatile loads. */
1226 if (TREE_THIS_VOLATILE (op
))
1231 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1233 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1239 if (guaranteed_unmodified
)
1240 *guaranteed_unmodified
= true;
1246 if (TREE_CODE (base
) != MEM_REF
1247 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1248 || !integer_zerop (TREE_OPERAND (base
, 1)))
1251 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1253 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1254 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1258 /* This branch catches situations where a pointer parameter is not a
1259 gimple register, for example:
1261 void hip7(S*) (struct S * p)
1263 void (*<T2e4>) (struct S *) D.1867;
1268 D.1867_2 = p.1_1->f;
1273 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1274 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1279 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1280 if (!data_preserved
&& !guaranteed_unmodified
)
1287 if (guaranteed_unmodified
)
1288 *guaranteed_unmodified
= data_preserved
;
1294 /* If STMT is an assignment that loads a value from a parameter declaration,
1295 or from an aggregate passed as the parameter either by value or reference,
1296 return the index of the parameter in ipa_node_params. Otherwise return -1.
1298 FBI holds gathered information about the function. INFO describes
1299 parameters of the function, STMT is the assignment statement. If it is a
1300 memory load from an aggregate, *OFFSET_P is filled with offset within the
1301 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1305 load_from_unmodified_param_or_agg (struct ipa_func_body_info
*fbi
,
1306 class ipa_node_params
*info
,
1308 HOST_WIDE_INT
*offset_p
,
1311 int index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1314 /* Load value from a parameter declaration. */
1321 if (!gimple_assign_load_p (stmt
))
1324 tree rhs
= gimple_assign_rhs1 (stmt
);
1326 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1327 for (tree t
= rhs
; handled_component_p (t
); t
= TREE_OPERAND (t
, 0))
1328 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
1331 /* Skip memory reference containing bit-field. */
1332 if (TREE_CODE (rhs
) == BIT_FIELD_REF
1333 || contains_bitfld_component_ref_p (rhs
))
1336 if (!ipa_load_from_parm_agg (fbi
, info
->descriptors
, stmt
, rhs
, &index
,
1337 offset_p
, &size
, by_ref_p
))
1340 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs
))),
1344 tree param_type
= ipa_get_type (info
, index
);
1346 if (!param_type
|| !AGGREGATE_TYPE_P (param_type
))
1349 else if (TREE_THIS_VOLATILE (rhs
))
1355 /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1356 to find original pointer. Initialize RET to the pointer which results from
1358 If offset is known return true and initialize OFFSET_RET. */
1361 unadjusted_ptr_and_unit_offset (tree op
, tree
*ret
, poly_int64
*offset_ret
)
1363 poly_int64 offset
= 0;
1364 bool offset_known
= true;
1367 for (i
= 0; i
< param_ipa_jump_function_lookups
; i
++)
1369 if (TREE_CODE (op
) == ADDR_EXPR
)
1371 poly_int64 extra_offset
= 0;
1372 tree base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
, 0),
1376 base
= get_base_address (TREE_OPERAND (op
, 0));
1377 if (TREE_CODE (base
) != MEM_REF
)
1379 offset_known
= false;
1383 if (TREE_CODE (base
) != MEM_REF
)
1385 offset
+= extra_offset
;
1387 op
= TREE_OPERAND (base
, 0);
1388 if (mem_ref_offset (base
).to_shwi (&extra_offset
))
1389 offset
+= extra_offset
;
1391 offset_known
= false;
1393 else if (TREE_CODE (op
) == SSA_NAME
1394 && !SSA_NAME_IS_DEFAULT_DEF (op
))
1396 gimple
*pstmt
= SSA_NAME_DEF_STMT (op
);
1398 if (gimple_assign_single_p (pstmt
))
1399 op
= gimple_assign_rhs1 (pstmt
);
1400 else if (is_gimple_assign (pstmt
)
1401 && gimple_assign_rhs_code (pstmt
) == POINTER_PLUS_EXPR
)
1403 poly_int64 extra_offset
= 0;
1404 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt
),
1406 offset
+= extra_offset
;
1408 offset_known
= false;
1409 op
= gimple_assign_rhs1 (pstmt
);
1418 *offset_ret
= offset
;
1419 return offset_known
;
1422 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1423 of an assignment statement STMT, try to determine whether we are actually
1424 handling any of the following cases and construct an appropriate jump
1425 function into JFUNC if so:
1427 1) The passed value is loaded from a formal parameter which is not a gimple
1428 register (most probably because it is addressable, the value has to be
1429 scalar) and we can guarantee the value has not changed. This case can
1430 therefore be described by a simple pass-through jump function. For example:
1439 2) The passed value can be described by a simple arithmetic pass-through
1446 D.2064_4 = a.1(D) + 4;
1449 This case can also occur in combination of the previous one, e.g.:
1457 D.2064_4 = a.0_3 + 4;
1460 3) The passed value is an address of an object within another one (which
1461 also passed by reference). Such situations are described by an ancestor
1462 jump function and describe situations such as:
1464 B::foo() (struct B * const this)
1468 D.1845_2 = &this_1(D)->D.1748;
1471 INFO is the structure describing individual parameters access different
1472 stages of IPA optimizations. PARMS_AINFO contains the information that is
1473 only needed for intraprocedural analysis. */
1476 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1477 class ipa_node_params
*info
,
1478 struct ipa_jump_func
*jfunc
,
1479 gcall
*call
, gimple
*stmt
, tree name
,
1482 HOST_WIDE_INT offset
, size
;
1483 tree op1
, tc_ssa
, base
, ssa
;
1487 op1
= gimple_assign_rhs1 (stmt
);
1489 if (TREE_CODE (op1
) == SSA_NAME
)
1491 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1492 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1494 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1495 SSA_NAME_DEF_STMT (op1
));
1500 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1501 tc_ssa
= gimple_assign_lhs (stmt
);
1506 switch (gimple_assign_rhs_class (stmt
))
1508 case GIMPLE_BINARY_RHS
:
1510 tree op2
= gimple_assign_rhs2 (stmt
);
1511 if (!is_gimple_ip_invariant (op2
)
1512 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1514 && !useless_type_conversion_p (TREE_TYPE (name
),
1518 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1519 gimple_assign_rhs_code (stmt
));
1522 case GIMPLE_SINGLE_RHS
:
1524 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1526 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1529 case GIMPLE_UNARY_RHS
:
1530 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1531 ipa_set_jf_unary_pass_through (jfunc
, index
,
1532 gimple_assign_rhs_code (stmt
));
1538 if (TREE_CODE (op1
) != ADDR_EXPR
)
1540 op1
= TREE_OPERAND (op1
, 0);
1541 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1542 offset_int mem_offset
;
1544 || TREE_CODE (base
) != MEM_REF
1545 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1547 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1548 ssa
= TREE_OPERAND (base
, 0);
1549 if (TREE_CODE (ssa
) != SSA_NAME
1550 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1554 /* Dynamic types are changed in constructors and destructors. */
1555 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1556 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1557 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1558 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
),
1562 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1565 iftmp.1_3 = &obj_2(D)->D.1762;
1567 The base of the MEM_REF must be a default definition SSA NAME of a
1568 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1569 whole MEM_REF expression is returned and the offset calculated from any
1570 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1571 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1574 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1577 tree expr
, parm
, obj
;
1580 if (!gimple_assign_single_p (assign
))
1582 expr
= gimple_assign_rhs1 (assign
);
1584 if (TREE_CODE (expr
) != ADDR_EXPR
)
1586 expr
= TREE_OPERAND (expr
, 0);
1588 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1590 offset_int mem_offset
;
1592 || TREE_CODE (expr
) != MEM_REF
1593 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1595 parm
= TREE_OPERAND (expr
, 0);
1596 if (TREE_CODE (parm
) != SSA_NAME
1597 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1598 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1601 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1607 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1608 statement PHI, try to find out whether NAME is in fact a
1609 multiple-inheritance typecast from a descendant into an ancestor of a formal
1610 parameter and thus can be described by an ancestor jump function and if so,
1611 write the appropriate function into JFUNC.
1613 Essentially we want to match the following pattern:
1621 iftmp.1_3 = &obj_2(D)->D.1762;
1624 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1625 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1629 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1630 class ipa_node_params
*info
,
1631 struct ipa_jump_func
*jfunc
,
1632 gcall
*call
, gphi
*phi
)
1634 HOST_WIDE_INT offset
;
1636 basic_block phi_bb
, assign_bb
, cond_bb
;
1637 tree tmp
, parm
, expr
, obj
;
1640 if (gimple_phi_num_args (phi
) != 2)
1643 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1644 tmp
= PHI_ARG_DEF (phi
, 0);
1645 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1646 tmp
= PHI_ARG_DEF (phi
, 1);
1649 if (TREE_CODE (tmp
) != SSA_NAME
1650 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1651 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1652 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1655 assign
= SSA_NAME_DEF_STMT (tmp
);
1656 assign_bb
= gimple_bb (assign
);
1657 if (!single_pred_p (assign_bb
))
1659 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1662 parm
= TREE_OPERAND (expr
, 0);
1663 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1667 cond_bb
= single_pred (assign_bb
);
1668 gcond
*cond
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (cond_bb
));
1670 || gimple_cond_code (cond
) != NE_EXPR
1671 || gimple_cond_lhs (cond
) != parm
1672 || !integer_zerop (gimple_cond_rhs (cond
)))
1675 phi_bb
= gimple_bb (phi
);
1676 for (i
= 0; i
< 2; i
++)
1678 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1679 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1683 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1684 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
),
1688 /* Inspect the given TYPE and return true iff it has the same structure (the
1689 same number of fields of the same types) as a C++ member pointer. If
1690 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1691 corresponding fields there. */
1694 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1698 if (TREE_CODE (type
) != RECORD_TYPE
)
1701 fld
= TYPE_FIELDS (type
);
1702 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1703 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1704 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1710 fld
= DECL_CHAIN (fld
);
1711 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1712 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1717 if (DECL_CHAIN (fld
))
1723 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1724 return the rhs of its defining statement, and this statement is stored in
1725 *RHS_STMT. Otherwise return RHS as it is. */
1728 get_ssa_def_if_simple_copy (tree rhs
, gimple
**rhs_stmt
)
1730 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1732 gimple
*def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1734 if (gimple_assign_single_p (def_stmt
))
1735 rhs
= gimple_assign_rhs1 (def_stmt
);
1738 *rhs_stmt
= def_stmt
;
1743 /* Simple linked list, describing contents of an aggregate before call. */
1745 struct ipa_known_agg_contents_list
1747 /* Offset and size of the described part of the aggregate. */
1748 HOST_WIDE_INT offset
, size
;
1750 /* Type of the described part of the aggregate. */
1753 /* Known constant value or jump function data describing contents. */
1754 struct ipa_load_agg_data value
;
1756 /* Pointer to the next structure in the list. */
1757 struct ipa_known_agg_contents_list
*next
;
1760 /* Add an aggregate content item into a linked list of
1761 ipa_known_agg_contents_list structure, in which all elements
1762 are sorted ascendingly by offset. */
1765 add_to_agg_contents_list (struct ipa_known_agg_contents_list
**plist
,
1766 struct ipa_known_agg_contents_list
*item
)
1768 struct ipa_known_agg_contents_list
*list
= *plist
;
1770 for (; list
; list
= list
->next
)
1772 if (list
->offset
>= item
->offset
)
1775 plist
= &list
->next
;
1782 /* Check whether a given aggregate content is clobbered by certain element in
1783 a linked list of ipa_known_agg_contents_list. */
1786 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list
*list
,
1787 struct ipa_known_agg_contents_list
*item
)
1789 for (; list
; list
= list
->next
)
1791 if (list
->offset
>= item
->offset
)
1792 return list
->offset
< item
->offset
+ item
->size
;
1794 if (list
->offset
+ list
->size
> item
->offset
)
1801 /* Build aggregate jump function from LIST, assuming there are exactly
1802 VALUE_COUNT entries there and that offset of the passed argument
1803 is ARG_OFFSET and store it into JFUNC. */
1806 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1807 int value_count
, HOST_WIDE_INT arg_offset
,
1808 struct ipa_jump_func
*jfunc
)
1810 vec_safe_reserve (jfunc
->agg
.items
, value_count
, true);
1811 for (; list
; list
= list
->next
)
1813 struct ipa_agg_jf_item item
;
1814 tree operand
= list
->value
.pass_through
.operand
;
1816 if (list
->value
.pass_through
.formal_id
>= 0)
1818 /* Content value is derived from some formal paramerter. */
1819 if (list
->value
.offset
>= 0)
1820 item
.jftype
= IPA_JF_LOAD_AGG
;
1822 item
.jftype
= IPA_JF_PASS_THROUGH
;
1824 item
.value
.load_agg
= list
->value
;
1826 item
.value
.pass_through
.operand
1827 = unshare_expr_without_location (operand
);
1831 /* Content value is known constant. */
1832 item
.jftype
= IPA_JF_CONST
;
1833 item
.value
.constant
= unshare_expr_without_location (operand
);
1838 item
.type
= list
->type
;
1839 gcc_assert (tree_to_shwi (TYPE_SIZE (list
->type
)) == list
->size
);
1841 item
.offset
= list
->offset
- arg_offset
;
1842 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1844 jfunc
->agg
.items
->quick_push (item
);
1848 /* Given an assignment statement STMT, try to collect information into
1849 AGG_VALUE that will be used to construct jump function for RHS of the
1850 assignment, from which content value of an aggregate part comes.
1852 Besides constant and simple pass-through jump functions, also try to
1853 identify whether it matches the following pattern that can be described by
1854 a load-value-from-aggregate jump function, which is a derivative of simple
1855 pass-through jump function.
1861 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1865 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1866 constant, simple pass-through and load-vale-from-aggregate. If value
1867 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1868 set to -1. For simple pass-through and load-value-from-aggregate, field
1869 FORMAL_ID specifies the related formal parameter index, and field
1870 OFFSET can be used to distinguish them, -1 means simple pass-through,
1871 otherwise means load-value-from-aggregate. */
1874 analyze_agg_content_value (struct ipa_func_body_info
*fbi
,
1875 struct ipa_load_agg_data
*agg_value
,
1878 tree lhs
= gimple_assign_lhs (stmt
);
1879 tree rhs1
= gimple_assign_rhs1 (stmt
);
1880 enum tree_code code
;
1883 /* Initialize jump function data for the aggregate part. */
1884 memset (agg_value
, 0, sizeof (*agg_value
));
1885 agg_value
->pass_through
.operation
= NOP_EXPR
;
1886 agg_value
->pass_through
.formal_id
= -1;
1887 agg_value
->offset
= -1;
1889 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
)) /* TODO: Support aggregate type. */
1890 || TREE_THIS_VOLATILE (lhs
)
1891 || TREE_CODE (lhs
) == BIT_FIELD_REF
1892 || contains_bitfld_component_ref_p (lhs
))
1895 /* Skip SSA copies. */
1896 while (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
1898 if (TREE_CODE (rhs1
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1901 stmt
= SSA_NAME_DEF_STMT (rhs1
);
1902 if (!is_gimple_assign (stmt
))
1905 rhs1
= gimple_assign_rhs1 (stmt
);
1908 if (gphi
*phi
= dyn_cast
<gphi
*> (stmt
))
1910 /* Also special case like the following (a is a formal parameter):
1912 _12 = *a_11(D).dim[0].stride;
1914 # iftmp.22_9 = PHI <_12(2), 1(3)>
1916 parm.6.dim[0].stride = iftmp.22_9;
1918 __x_MOD_foo (&parm.6, b_31(D));
1920 The aggregate function describing parm.6.dim[0].stride is encoded as a
1921 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1922 (the constant from the PHI node). */
1924 if (gimple_phi_num_args (phi
) != 2)
1926 tree arg0
= gimple_phi_arg_def (phi
, 0);
1927 tree arg1
= gimple_phi_arg_def (phi
, 1);
1930 if (is_gimple_ip_invariant (arg1
))
1935 else if (is_gimple_ip_invariant (arg0
))
1943 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1944 if (!is_gimple_assign (stmt
))
1948 agg_value
->pass_through
.operand
= operand
;
1950 else if (is_gimple_assign (stmt
))
1952 code
= gimple_assign_rhs_code (stmt
);
1953 switch (gimple_assign_rhs_class (stmt
))
1955 case GIMPLE_SINGLE_RHS
:
1956 if (is_gimple_ip_invariant (rhs1
))
1958 agg_value
->pass_through
.operand
= rhs1
;
1964 case GIMPLE_UNARY_RHS
:
1965 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1966 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1967 tcc_binary, this subtleness is somewhat misleading.
1969 Since tcc_unary is widely used in IPA-CP code to check an operation
1970 with one operand, here we only allow tc_unary operation to avoid
1971 possible problem. Then we can use (opclass == tc_unary) or not to
1972 distinguish unary and binary. */
1973 if (TREE_CODE_CLASS (code
) != tcc_unary
|| CONVERT_EXPR_CODE_P (code
))
1976 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1979 case GIMPLE_BINARY_RHS
:
1981 gimple
*rhs1_stmt
= stmt
;
1982 gimple
*rhs2_stmt
= stmt
;
1983 tree rhs2
= gimple_assign_rhs2 (stmt
);
1985 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &rhs1_stmt
);
1986 rhs2
= get_ssa_def_if_simple_copy (rhs2
, &rhs2_stmt
);
1988 if (is_gimple_ip_invariant (rhs2
))
1990 agg_value
->pass_through
.operand
= rhs2
;
1993 else if (is_gimple_ip_invariant (rhs1
))
1995 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1996 code
= swap_tree_comparison (code
);
1997 else if (!commutative_tree_code (code
))
2000 agg_value
->pass_through
.operand
= rhs1
;
2007 if (TREE_CODE_CLASS (code
) != tcc_comparison
2008 && !useless_type_conversion_p (TREE_TYPE (lhs
),
2021 if (TREE_CODE (rhs1
) != SSA_NAME
)
2022 index
= load_from_unmodified_param_or_agg (fbi
, fbi
->info
, stmt
,
2024 &agg_value
->by_ref
);
2025 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1
))
2026 index
= ipa_get_param_decl_index (fbi
->info
, SSA_NAME_VAR (rhs1
));
2030 if (agg_value
->offset
>= 0)
2031 agg_value
->type
= TREE_TYPE (rhs1
);
2032 agg_value
->pass_through
.formal_id
= index
;
2033 agg_value
->pass_through
.operation
= code
;
2036 agg_value
->pass_through
.operand
= NULL_TREE
;
2039 /* If STMT is a memory store to the object whose address is BASE, extract
2040 information (offset, size, and value) into CONTENT, and return true,
2041 otherwise we conservatively assume the whole object is modified with
2042 unknown content, and return false. CHECK_REF means that access to object
2043 is expected to be in form of MEM_REF expression. */
2046 extract_mem_content (struct ipa_func_body_info
*fbi
,
2047 gimple
*stmt
, tree base
, bool check_ref
,
2048 struct ipa_known_agg_contents_list
*content
)
2050 HOST_WIDE_INT lhs_offset
, lhs_size
;
2053 if (!is_gimple_assign (stmt
))
2056 tree lhs
= gimple_assign_lhs (stmt
);
2057 tree lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
, &lhs_size
,
2064 if (TREE_CODE (lhs_base
) != MEM_REF
2065 || TREE_OPERAND (lhs_base
, 0) != base
2066 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
2069 else if (lhs_base
!= base
)
2072 content
->offset
= lhs_offset
;
2073 content
->size
= lhs_size
;
2074 content
->type
= TREE_TYPE (lhs
);
2075 content
->next
= NULL
;
2077 analyze_agg_content_value (fbi
, &content
->value
, stmt
);
2081 /* Traverse statements from CALL backwards, scanning whether an aggregate given
2082 in ARG is filled in constants or values that are derived from caller's
2083 formal parameter in the way described by some kinds of jump functions. FBI
2084 is the context of the caller function for interprocedural analysis. ARG can
2085 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
2086 the type of the aggregate, JFUNC is the jump function for the aggregate. */
2089 determine_known_aggregate_parts (struct ipa_func_body_info
*fbi
,
2090 gcall
*call
, tree arg
,
2092 struct ipa_jump_func
*jfunc
)
2094 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
2095 bitmap visited
= NULL
;
2096 int item_count
= 0, value_count
= 0;
2097 HOST_WIDE_INT arg_offset
, arg_size
;
2099 bool check_ref
, by_ref
;
2101 int max_agg_items
= opt_for_fn (fbi
->node
->decl
, param_ipa_max_agg_items
);
2103 if (max_agg_items
== 0)
2106 /* The function operates in three stages. First, we prepare check_ref, r,
2107 arg_base and arg_offset based on what is actually passed as an actual
2110 if (POINTER_TYPE_P (arg_type
))
2113 if (TREE_CODE (arg
) == SSA_NAME
)
2116 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
2117 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
2122 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
2123 arg_size
= tree_to_uhwi (type_size
);
2124 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
2126 else if (TREE_CODE (arg
) == ADDR_EXPR
)
2130 arg
= TREE_OPERAND (arg
, 0);
2131 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2132 &arg_size
, &reverse
);
2135 if (DECL_P (arg_base
))
2138 ao_ref_init (&r
, arg_base
);
2150 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
2154 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2155 &arg_size
, &reverse
);
2159 ao_ref_init (&r
, arg
);
2162 /* Second stage traverses virtual SSA web backwards starting from the call
2163 statement, only looks at individual dominating virtual operand (its
2164 definition dominates the call), as long as it is confident that content
2165 of the aggregate is affected by definition of the virtual operand, it
2166 builds a sorted linked list of ipa_agg_jf_list describing that. */
2168 for (tree dom_vuse
= gimple_vuse (call
);
2169 dom_vuse
&& fbi
->aa_walk_budget
> 0;)
2171 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
2173 if (gimple_code (stmt
) == GIMPLE_PHI
)
2175 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
2176 fbi
->aa_walk_budget
,
2177 &visited
, false, NULL
, NULL
);
2181 fbi
->aa_walk_budget
--;
2182 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
2184 struct ipa_known_agg_contents_list
*content
2185 = XALLOCA (struct ipa_known_agg_contents_list
);
2187 if (!extract_mem_content (fbi
, stmt
, arg_base
, check_ref
, content
))
2190 /* Now we get a dominating virtual operand, and need to check
2191 whether its value is clobbered any other dominating one. */
2192 if ((content
->value
.pass_through
.formal_id
>= 0
2193 || content
->value
.pass_through
.operand
)
2194 && !clobber_by_agg_contents_list_p (all_list
, content
)
2195 /* Since IPA-CP stores results with unsigned int offsets, we can
2196 discard those which would not fit now before we stream them to
2198 && (content
->offset
+ content
->size
- arg_offset
2199 <= (HOST_WIDE_INT
) UINT_MAX
* BITS_PER_UNIT
))
2201 struct ipa_known_agg_contents_list
*copy
2202 = XALLOCA (struct ipa_known_agg_contents_list
);
2204 /* Add to the list consisting of only dominating virtual
2205 operands, whose definitions can finally reach the call. */
2206 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
2208 if (++value_count
== max_agg_items
)
2212 /* Add to the list consisting of all dominating virtual operands. */
2213 add_to_agg_contents_list (&all_list
, content
);
2215 if (++item_count
== 2 * max_agg_items
)
2218 dom_vuse
= gimple_vuse (stmt
);
2222 BITMAP_FREE (visited
);
2224 /* Third stage just goes over the list and creates an appropriate vector of
2225 ipa_agg_jf_item structures out of it, of course only if there are
2226 any meaningful items to begin with. */
2230 jfunc
->agg
.by_ref
= by_ref
;
2231 build_agg_jump_func_from_list (list
, value_count
, arg_offset
, jfunc
);
2236 /* Return the Ith param type of callee associated with call graph
2240 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
2243 tree type
= (e
->callee
2244 ? TREE_TYPE (e
->callee
->decl
)
2245 : gimple_call_fntype (e
->call_stmt
));
2246 tree t
= TYPE_ARG_TYPES (type
);
2248 for (n
= 0; n
< i
; n
++)
2254 if (t
&& t
!= void_list_node
)
2255 return TREE_VALUE (t
);
2258 t
= DECL_ARGUMENTS (e
->callee
->decl
);
2259 for (n
= 0; n
< i
; n
++)
2266 return TREE_TYPE (t
);
2270 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2271 allocated structure or a previously existing one shared with other jump
2272 functions and/or transformation summaries. */
2275 ipa_get_ipa_bits_for_value (const widest_int
&value
, const widest_int
&mask
)
2281 ipa_bits
**slot
= ipa_bits_hash_table
->find_slot (&tmp
, INSERT
);
2285 ipa_bits
*res
= ggc_alloc
<ipa_bits
> ();
2293 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2294 table in order to avoid creating multiple same ipa_bits structures. */
2297 ipa_set_jfunc_bits (ipa_jump_func
*jf
, const widest_int
&value
,
2298 const widest_int
&mask
)
2300 jf
->bits
= ipa_get_ipa_bits_for_value (value
, mask
);
2303 /* Return a pointer to an ipa_vr just like TMP, but either find it in
2304 ipa_vr_hash_table or allocate it in GC memory. */
2307 ipa_get_value_range (const vrange
&tmp
)
2309 inchash::hash hstate
;
2310 inchash::add_vrange (tmp
, hstate
);
2311 hashval_t hash
= hstate
.end ();
2312 ipa_vr
**slot
= ipa_vr_hash_table
->find_slot_with_hash (&tmp
, hash
, INSERT
);
2316 ipa_vr
*vr
= new (ggc_alloc
<ipa_vr
> ()) ipa_vr (tmp
);
2321 /* Assign to JF a pointer to a range just like TMP but either fetch a
2322 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2325 ipa_set_jfunc_vr (ipa_jump_func
*jf
, const vrange
&tmp
)
2327 jf
->m_vr
= ipa_get_value_range (tmp
);
2331 ipa_set_jfunc_vr (ipa_jump_func
*jf
, const ipa_vr
&vr
)
2334 vr
.get_vrange (tmp
);
2335 ipa_set_jfunc_vr (jf
, tmp
);
2338 /* Compute jump function for all arguments of callsite CS and insert the
2339 information in the jump_functions array in the ipa_edge_args corresponding
2340 to this callsite. */
2343 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info
*fbi
,
2344 struct cgraph_edge
*cs
)
2346 ipa_node_params
*info
= ipa_node_params_sum
->get (cs
->caller
);
2347 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (cs
);
2348 gcall
*call
= cs
->call_stmt
;
2349 int n
, arg_num
= gimple_call_num_args (call
);
2350 bool useful_context
= false;
2352 if (arg_num
== 0 || args
->jump_functions
)
2354 vec_safe_grow_cleared (args
->jump_functions
, arg_num
, true);
2355 if (flag_devirtualize
)
2356 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
, true);
2358 if (gimple_call_internal_p (call
))
2360 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
2363 for (n
= 0; n
< arg_num
; n
++)
2365 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
2366 tree arg
= gimple_call_arg (call
, n
);
2367 tree param_type
= ipa_get_callee_param_type (cs
, n
);
2368 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
2371 class ipa_polymorphic_call_context
context (cs
->caller
->decl
,
2374 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
,
2375 &fbi
->aa_walk_budget
);
2376 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
2377 if (!context
.useless_p ())
2378 useful_context
= true;
2381 Value_Range
vr (TREE_TYPE (arg
));
2382 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2384 bool addr_nonzero
= false;
2385 bool strict_overflow
= false;
2387 if (TREE_CODE (arg
) == SSA_NAME
2389 && get_range_query (cfun
)->range_of_expr (vr
, arg
, cs
->call_stmt
)
2391 addr_nonzero
= true;
2392 else if (tree_single_nonzero_warnv_p (arg
, &strict_overflow
))
2393 addr_nonzero
= true;
2397 vr
.set_nonzero (TREE_TYPE (arg
));
2398 ipa_set_jfunc_vr (jfunc
, vr
);
2401 gcc_assert (!jfunc
->m_vr
);
2406 && Value_Range::supports_type_p (TREE_TYPE (arg
))
2407 && Value_Range::supports_type_p (param_type
)
2408 && irange::supports_p (TREE_TYPE (arg
))
2409 && irange::supports_p (param_type
)
2410 && get_range_query (cfun
)->range_of_expr (vr
, arg
, cs
->call_stmt
)
2411 && !vr
.undefined_p ())
2413 Value_Range
resvr (vr
);
2414 range_cast (resvr
, param_type
);
2415 if (!resvr
.undefined_p () && !resvr
.varying_p ())
2416 ipa_set_jfunc_vr (jfunc
, resvr
);
2418 gcc_assert (!jfunc
->m_vr
);
2421 gcc_assert (!jfunc
->m_vr
);
2424 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
)) && !vr
.undefined_p ())
2426 irange
&r
= as_a
<irange
> (vr
);
2427 irange_bitmask bm
= r
.get_bitmask ();
2428 signop sign
= TYPE_SIGN (TREE_TYPE (arg
));
2429 ipa_set_jfunc_bits (jfunc
,
2430 widest_int::from (bm
.value (), sign
),
2431 widest_int::from (bm
.mask (), sign
));
2433 else if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2435 unsigned HOST_WIDE_INT bitpos
;
2438 get_pointer_alignment_1 (arg
, &align
, &bitpos
);
2439 widest_int mask
= wi::bit_and_not
2440 (wi::mask
<widest_int
> (TYPE_PRECISION (TREE_TYPE (arg
)), false),
2441 align
/ BITS_PER_UNIT
- 1);
2442 widest_int value
= bitpos
/ BITS_PER_UNIT
;
2443 ipa_set_jfunc_bits (jfunc
, value
, mask
);
2446 gcc_assert (!jfunc
->bits
);
2448 if (is_gimple_ip_invariant (arg
)
2450 && is_global_var (arg
)
2451 && TREE_READONLY (arg
)))
2452 ipa_set_jf_constant (jfunc
, arg
, cs
);
2453 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
2454 && TREE_CODE (arg
) == PARM_DECL
)
2456 int index
= ipa_get_param_decl_index (info
, arg
);
2458 gcc_assert (index
>=0);
2459 /* Aggregate passed by value, check for pass-through, otherwise we
2460 will attempt to fill in aggregate contents later in this
2462 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
2464 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
2468 else if (TREE_CODE (arg
) == SSA_NAME
)
2470 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
2472 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
2476 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
2477 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
2482 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
2483 if (is_gimple_assign (stmt
))
2484 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
2485 call
, stmt
, arg
, param_type
);
2486 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2487 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
2489 as_a
<gphi
*> (stmt
));
2493 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2494 passed (because type conversions are ignored in gimple). Usually we can
2495 safely get type from function declaration, but in case of K&R prototypes or
2496 variadic functions we can try our luck with type of the pointer passed.
2497 TODO: Since we look for actual initialization of the memory object, we may better
2498 work out the type based on the memory stores we find. */
2500 param_type
= TREE_TYPE (arg
);
2502 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
2503 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2504 && (jfunc
->type
!= IPA_JF_ANCESTOR
2505 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2506 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
2507 || POINTER_TYPE_P (param_type
)))
2508 determine_known_aggregate_parts (fbi
, call
, arg
, param_type
, jfunc
);
2510 if (!useful_context
)
2511 vec_free (args
->polymorphic_call_contexts
);
2514 /* Compute jump functions for all edges - both direct and indirect - outgoing
2518 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2520 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
2522 struct cgraph_edge
*cs
;
2524 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
2526 struct cgraph_node
*callee
= cs
->callee
;
2530 callee
= callee
->ultimate_alias_target ();
2531 /* We do not need to bother analyzing calls to unknown functions
2532 unless they may become known during lto/whopr. */
2533 if (!callee
->definition
&& !flag_lto
2534 && !gimple_call_fnspec (cs
->call_stmt
).known_p ())
2537 ipa_compute_jump_functions_for_edge (fbi
, cs
);
2541 /* If STMT looks like a statement loading a value from a member pointer formal
2542 parameter, return that parameter and store the offset of the field to
2543 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2544 might be clobbered). If USE_DELTA, then we look for a use of the delta
2545 field rather than the pfn. */
2548 ipa_get_stmt_member_ptr_load_param (gimple
*stmt
, bool use_delta
,
2549 HOST_WIDE_INT
*offset_p
)
2551 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
2553 if (!gimple_assign_single_p (stmt
))
2556 rhs
= gimple_assign_rhs1 (stmt
);
2557 if (TREE_CODE (rhs
) == COMPONENT_REF
)
2559 ref_field
= TREE_OPERAND (rhs
, 1);
2560 rhs
= TREE_OPERAND (rhs
, 0);
2563 ref_field
= NULL_TREE
;
2564 if (TREE_CODE (rhs
) != MEM_REF
)
2566 rec
= TREE_OPERAND (rhs
, 0);
2567 if (TREE_CODE (rec
) != ADDR_EXPR
)
2569 rec
= TREE_OPERAND (rec
, 0);
2570 if (TREE_CODE (rec
) != PARM_DECL
2571 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
2573 ref_offset
= TREE_OPERAND (rhs
, 1);
2580 *offset_p
= int_bit_position (fld
);
2584 if (integer_nonzerop (ref_offset
))
2586 return ref_field
== fld
? rec
: NULL_TREE
;
2589 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
2593 /* Returns true iff T is an SSA_NAME defined by a statement. */
2596 ipa_is_ssa_with_stmt_def (tree t
)
2598 if (TREE_CODE (t
) == SSA_NAME
2599 && !SSA_NAME_IS_DEFAULT_DEF (t
))
2605 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2606 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2607 indirect call graph edge.
2608 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2610 static struct cgraph_edge
*
2611 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
2612 gcall
*stmt
, bool polymorphic
)
2614 struct cgraph_edge
*cs
;
2616 cs
= node
->get_edge (stmt
);
2617 cs
->indirect_info
->param_index
= param_index
;
2618 cs
->indirect_info
->agg_contents
= 0;
2619 cs
->indirect_info
->member_ptr
= 0;
2620 cs
->indirect_info
->guaranteed_unmodified
= 0;
2621 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
2622 ipa_set_param_used_by_indirect_call (info
, param_index
, true);
2623 if (cs
->indirect_info
->polymorphic
|| polymorphic
)
2624 ipa_set_param_used_by_polymorphic_call (info
, param_index
, true);
2628 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2629 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2630 intermediate information about each formal parameter. Currently it checks
2631 whether the call calls a pointer that is a formal parameter and if so, the
2632 parameter is marked with the called flag and an indirect call graph edge
2633 describing the call is created. This is very simple for ordinary pointers
2634 represented in SSA but not-so-nice when it comes to member pointers. The
2635 ugly part of this function does nothing more than trying to match the
2636 pattern of such a call. An example of such a pattern is the gimple dump
2637 below, the call is on the last line:
2640 f$__delta_5 = f.__delta;
2641 f$__pfn_24 = f.__pfn;
2645 f$__delta_5 = MEM[(struct *)&f];
2646 f$__pfn_24 = MEM[(struct *)&f + 4B];
2648 and a few lines below:
2651 D.2496_3 = (int) f$__pfn_24;
2652 D.2497_4 = D.2496_3 & 1;
2659 D.2500_7 = (unsigned int) f$__delta_5;
2660 D.2501_8 = &S + D.2500_7;
2661 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2662 D.2503_10 = *D.2502_9;
2663 D.2504_12 = f$__pfn_24 + -1;
2664 D.2505_13 = (unsigned int) D.2504_12;
2665 D.2506_14 = D.2503_10 + D.2505_13;
2666 D.2507_15 = *D.2506_14;
2667 iftmp.11_16 = (String:: *) D.2507_15;
2670 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2671 D.2500_19 = (unsigned int) f$__delta_5;
2672 D.2508_20 = &S + D.2500_19;
2673 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2675 Such patterns are results of simple calls to a member pointer:
2677 int doprinting (int (MyString::* f)(int) const)
2679 MyString S ("somestring");
2684 Moreover, the function also looks for called pointers loaded from aggregates
2685 passed by value or reference. */
2688 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2691 class ipa_node_params
*info
= fbi
->info
;
2692 HOST_WIDE_INT offset
;
2695 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2697 tree var
= SSA_NAME_VAR (target
);
2698 int index
= ipa_get_param_decl_index (info
, var
);
2700 ipa_note_param_call (fbi
->node
, index
, call
, false);
2705 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2706 bool guaranteed_unmodified
;
2707 if (gimple_assign_single_p (def
)
2708 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2709 gimple_assign_rhs1 (def
), &index
, &offset
,
2710 NULL
, &by_ref
, &guaranteed_unmodified
))
2712 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2714 cs
->indirect_info
->offset
= offset
;
2715 cs
->indirect_info
->agg_contents
= 1;
2716 cs
->indirect_info
->by_ref
= by_ref
;
2717 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2721 /* Now we need to try to match the complex pattern of calling a member
2723 if (gimple_code (def
) != GIMPLE_PHI
2724 || gimple_phi_num_args (def
) != 2
2725 || !POINTER_TYPE_P (TREE_TYPE (target
))
2726 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2729 /* First, we need to check whether one of these is a load from a member
2730 pointer that is a parameter to this function. */
2731 tree n1
= PHI_ARG_DEF (def
, 0);
2732 tree n2
= PHI_ARG_DEF (def
, 1);
2733 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2735 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2736 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2739 basic_block bb
, virt_bb
;
2740 basic_block join
= gimple_bb (def
);
2741 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2743 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2746 bb
= EDGE_PRED (join
, 0)->src
;
2747 virt_bb
= gimple_bb (d2
);
2749 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2751 bb
= EDGE_PRED (join
, 1)->src
;
2752 virt_bb
= gimple_bb (d1
);
2757 /* Second, we need to check that the basic blocks are laid out in the way
2758 corresponding to the pattern. */
2760 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2761 || single_pred (virt_bb
) != bb
2762 || single_succ (virt_bb
) != join
)
2765 /* Third, let's see that the branching is done depending on the least
2766 significant bit of the pfn. */
2768 gcond
*branch
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (bb
));
2772 if ((gimple_cond_code (branch
) != NE_EXPR
2773 && gimple_cond_code (branch
) != EQ_EXPR
)
2774 || !integer_zerop (gimple_cond_rhs (branch
)))
2777 tree cond
= gimple_cond_lhs (branch
);
2778 if (!ipa_is_ssa_with_stmt_def (cond
))
2781 def
= SSA_NAME_DEF_STMT (cond
);
2782 if (!is_gimple_assign (def
)
2783 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2784 || !integer_onep (gimple_assign_rhs2 (def
)))
2787 cond
= gimple_assign_rhs1 (def
);
2788 if (!ipa_is_ssa_with_stmt_def (cond
))
2791 def
= SSA_NAME_DEF_STMT (cond
);
2793 if (is_gimple_assign (def
)
2794 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2796 cond
= gimple_assign_rhs1 (def
);
2797 if (!ipa_is_ssa_with_stmt_def (cond
))
2799 def
= SSA_NAME_DEF_STMT (cond
);
2803 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2804 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2805 == ptrmemfunc_vbit_in_delta
),
2810 index
= ipa_get_param_decl_index (info
, rec
);
2812 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2814 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2816 cs
->indirect_info
->offset
= offset
;
2817 cs
->indirect_info
->agg_contents
= 1;
2818 cs
->indirect_info
->member_ptr
= 1;
2819 cs
->indirect_info
->guaranteed_unmodified
= 1;
2825 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2826 object referenced in the expression is a formal parameter of the caller
2827 FBI->node (described by FBI->info), create a call note for the
2831 ipa_analyze_virtual_call_uses (struct ipa_func_body_info
*fbi
,
2832 gcall
*call
, tree target
)
2834 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2836 HOST_WIDE_INT anc_offset
;
2838 if (!flag_devirtualize
)
2841 if (TREE_CODE (obj
) != SSA_NAME
)
2844 class ipa_node_params
*info
= fbi
->info
;
2845 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2847 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2851 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2852 gcc_assert (index
>= 0);
2853 if (detect_type_change_ssa (fbi
, obj
, obj_type_ref_class (target
),
2859 gimple
*stmt
= SSA_NAME_DEF_STMT (obj
);
2862 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2865 index
= ipa_get_param_decl_index (info
,
2866 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2867 gcc_assert (index
>= 0);
2868 if (detect_type_change (fbi
, obj
, expr
, obj_type_ref_class (target
),
2873 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2875 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2876 ii
->offset
= anc_offset
;
2877 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2878 ii
->otr_type
= obj_type_ref_class (target
);
2879 ii
->polymorphic
= 1;
2882 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2883 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2884 containing intermediate information about each formal parameter. */
2887 ipa_analyze_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
)
2889 tree target
= gimple_call_fn (call
);
2892 || (TREE_CODE (target
) != SSA_NAME
2893 && !virtual_method_call_p (target
)))
2896 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2897 /* If we previously turned the call into a direct call, there is
2898 no need to analyze. */
2899 if (cs
&& !cs
->indirect_unknown_callee
)
2902 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2905 tree target
= gimple_call_fn (call
);
2906 ipa_polymorphic_call_context
context (current_function_decl
,
2907 target
, call
, &instance
);
2909 gcc_checking_assert (cs
->indirect_info
->otr_type
2910 == obj_type_ref_class (target
));
2911 gcc_checking_assert (cs
->indirect_info
->otr_token
2912 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2914 cs
->indirect_info
->vptr_changed
2915 = !context
.get_dynamic_type (instance
,
2916 OBJ_TYPE_REF_OBJECT (target
),
2917 obj_type_ref_class (target
), call
,
2918 &fbi
->aa_walk_budget
);
2919 cs
->indirect_info
->context
= context
;
2922 if (TREE_CODE (target
) == SSA_NAME
)
2923 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2924 else if (virtual_method_call_p (target
))
2925 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2929 /* Analyze the call statement STMT with respect to formal parameters (described
2930 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2931 formal parameters are called. */
2934 ipa_analyze_stmt_uses (struct ipa_func_body_info
*fbi
, gimple
*stmt
)
2936 if (is_gimple_call (stmt
))
2937 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2940 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2941 If OP is a parameter declaration, mark it as used in the info structure
2945 visit_ref_for_mod_analysis (gimple
*, tree op
, tree
, void *data
)
2947 class ipa_node_params
*info
= (class ipa_node_params
*) data
;
2949 op
= get_base_address (op
);
2951 && TREE_CODE (op
) == PARM_DECL
)
2953 int index
= ipa_get_param_decl_index (info
, op
);
2954 gcc_assert (index
>= 0);
2955 ipa_set_param_used (info
, index
, true);
2961 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2962 the findings in various structures of the associated ipa_node_params
2963 structure, such as parameter flags, notes etc. FBI holds various data about
2964 the function being analyzed. */
2967 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2969 gimple_stmt_iterator gsi
;
2970 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2972 gimple
*stmt
= gsi_stmt (gsi
);
2974 if (is_gimple_debug (stmt
))
2977 ipa_analyze_stmt_uses (fbi
, stmt
);
2978 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2979 visit_ref_for_mod_analysis
,
2980 visit_ref_for_mod_analysis
,
2981 visit_ref_for_mod_analysis
);
2983 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2984 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2985 visit_ref_for_mod_analysis
,
2986 visit_ref_for_mod_analysis
,
2987 visit_ref_for_mod_analysis
);
2990 /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2993 load_from_dereferenced_name (tree expr
, tree name
)
2995 tree base
= get_base_address (expr
);
2996 return (TREE_CODE (base
) == MEM_REF
2997 && TREE_OPERAND (base
, 0) == name
);
3000 /* Calculate controlled uses of parameters of NODE. */
3003 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
3005 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
3007 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
3009 tree parm
= ipa_get_param (info
, i
);
3011 bool load_dereferenced
= false;
3013 /* For SSA regs see if parameter is used. For non-SSA we compute
3014 the flag during modification analysis. */
3015 if (is_gimple_reg (parm
))
3017 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
3019 if (ddef
&& !has_zero_uses (ddef
))
3021 imm_use_iterator imm_iter
;
3024 ipa_set_param_used (info
, i
, true);
3025 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, ddef
)
3027 if (is_gimple_debug (stmt
))
3030 int all_stmt_uses
= 0;
3031 use_operand_p use_p
;
3032 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
3035 if (is_gimple_call (stmt
))
3037 if (gimple_call_internal_p (stmt
))
3039 call_uses
= IPA_UNDESCRIBED_USE
;
3042 int recognized_stmt_uses
;
3043 if (gimple_call_fn (stmt
) == ddef
)
3044 recognized_stmt_uses
= 1;
3046 recognized_stmt_uses
= 0;
3047 unsigned arg_count
= gimple_call_num_args (stmt
);
3048 for (unsigned i
= 0; i
< arg_count
; i
++)
3050 tree arg
= gimple_call_arg (stmt
, i
);
3052 recognized_stmt_uses
++;
3053 else if (load_from_dereferenced_name (arg
, ddef
))
3055 load_dereferenced
= true;
3056 recognized_stmt_uses
++;
3060 if (recognized_stmt_uses
!= all_stmt_uses
)
3062 call_uses
= IPA_UNDESCRIBED_USE
;
3066 call_uses
+= all_stmt_uses
;
3068 else if (gimple_assign_single_p (stmt
))
3070 tree rhs
= gimple_assign_rhs1 (stmt
);
3071 if (all_stmt_uses
!= 1
3072 || !load_from_dereferenced_name (rhs
, ddef
))
3074 call_uses
= IPA_UNDESCRIBED_USE
;
3077 load_dereferenced
= true;
3081 call_uses
= IPA_UNDESCRIBED_USE
;
3090 call_uses
= IPA_UNDESCRIBED_USE
;
3091 ipa_set_controlled_uses (info
, i
, call_uses
);
3092 ipa_set_param_load_dereferenced (info
, i
, load_dereferenced
);
3096 /* Free stuff in BI. */
3099 free_ipa_bb_info (struct ipa_bb_info
*bi
)
3101 bi
->cg_edges
.release ();
3102 bi
->param_aa_statuses
.release ();
3105 /* Dominator walker driving the analysis. */
3107 class analysis_dom_walker
: public dom_walker
3110 analysis_dom_walker (struct ipa_func_body_info
*fbi
)
3111 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
3113 edge
before_dom_children (basic_block
) final override
;
3116 struct ipa_func_body_info
*m_fbi
;
3120 analysis_dom_walker::before_dom_children (basic_block bb
)
3122 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
3123 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
3127 /* Release body info FBI. */
3130 ipa_release_body_info (struct ipa_func_body_info
*fbi
)
3133 struct ipa_bb_info
*bi
;
3135 FOR_EACH_VEC_ELT (fbi
->bb_infos
, i
, bi
)
3136 free_ipa_bb_info (bi
);
3137 fbi
->bb_infos
.release ();
3140 /* Initialize the array describing properties of formal parameters
3141 of NODE, analyze their uses and compute jump functions associated
3142 with actual arguments of calls from within NODE. */
3145 ipa_analyze_node (struct cgraph_node
*node
)
3147 struct ipa_func_body_info fbi
;
3148 class ipa_node_params
*info
;
3150 ipa_check_create_node_params ();
3151 ipa_check_create_edge_args ();
3152 info
= ipa_node_params_sum
->get_create (node
);
3154 if (info
->analysis_done
)
3156 info
->analysis_done
= 1;
3158 if (ipa_func_spec_opts_forbid_analysis_p (node
)
3159 || (count_formal_params (node
->decl
)
3160 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS
)))
3162 gcc_assert (!ipa_get_param_count (info
));
3166 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
3168 calculate_dominance_info (CDI_DOMINATORS
);
3169 ipa_initialize_node_params (node
);
3170 ipa_analyze_controlled_uses (node
);
3174 fbi
.bb_infos
= vNULL
;
3175 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
3176 fbi
.param_count
= ipa_get_param_count (info
);
3177 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
3179 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
3181 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3182 bi
->cg_edges
.safe_push (cs
);
3185 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
3187 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3188 bi
->cg_edges
.safe_push (cs
);
3191 enable_ranger (cfun
, false);
3192 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3193 disable_ranger (cfun
);
3195 ipa_release_body_info (&fbi
);
3196 free_dominance_info (CDI_DOMINATORS
);
3200 /* Update the jump functions associated with call graph edge E when the call
3201 graph edge CS is being inlined, assuming that E->caller is already (possibly
3202 indirectly) inlined into CS->callee and that E has not been inlined. */
3205 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
3206 struct cgraph_edge
*e
)
3208 ipa_edge_args
*top
= ipa_edge_args_sum
->get (cs
);
3209 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
3212 int count
= ipa_get_cs_argument_count (args
);
3215 for (i
= 0; i
< count
; i
++)
3217 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
3218 class ipa_polymorphic_call_context
*dst_ctx
3219 = ipa_get_ith_polymorhic_call_context (args
, i
);
3223 struct ipa_agg_jf_item
*item
;
3226 FOR_EACH_VEC_ELT (*dst
->agg
.items
, j
, item
)
3229 struct ipa_jump_func
*src
;
3231 if (item
->jftype
!= IPA_JF_PASS_THROUGH
3232 && item
->jftype
!= IPA_JF_LOAD_AGG
)
3235 dst_fid
= item
->value
.pass_through
.formal_id
;
3236 if (!top
|| dst_fid
>= ipa_get_cs_argument_count (top
))
3238 item
->jftype
= IPA_JF_UNKNOWN
;
3242 item
->value
.pass_through
.formal_id
= -1;
3243 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3244 if (src
->type
== IPA_JF_CONST
)
3246 if (item
->jftype
== IPA_JF_PASS_THROUGH
3247 && item
->value
.pass_through
.operation
== NOP_EXPR
)
3249 item
->jftype
= IPA_JF_CONST
;
3250 item
->value
.constant
= src
->value
.constant
.value
;
3254 else if (src
->type
== IPA_JF_PASS_THROUGH
3255 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3257 if (item
->jftype
== IPA_JF_PASS_THROUGH
3258 || !item
->value
.load_agg
.by_ref
3259 || src
->value
.pass_through
.agg_preserved
)
3260 item
->value
.pass_through
.formal_id
3261 = src
->value
.pass_through
.formal_id
;
3263 else if (src
->type
== IPA_JF_ANCESTOR
)
3265 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
3267 if (!src
->value
.ancestor
.offset
)
3268 item
->value
.pass_through
.formal_id
3269 = src
->value
.ancestor
.formal_id
;
3271 else if (src
->value
.ancestor
.agg_preserved
)
3273 gcc_checking_assert (item
->value
.load_agg
.by_ref
);
3275 item
->value
.pass_through
.formal_id
3276 = src
->value
.ancestor
.formal_id
;
3277 item
->value
.load_agg
.offset
3278 += src
->value
.ancestor
.offset
;
3282 if (item
->value
.pass_through
.formal_id
< 0)
3283 item
->jftype
= IPA_JF_UNKNOWN
;
3289 ipa_set_jf_unknown (dst
);
3293 if (dst
->type
== IPA_JF_ANCESTOR
)
3295 struct ipa_jump_func
*src
;
3296 int dst_fid
= dst
->value
.ancestor
.formal_id
;
3297 class ipa_polymorphic_call_context
*src_ctx
3298 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3300 /* Variable number of arguments can cause havoc if we try to access
3301 one that does not exist in the inlined edge. So make sure we
3303 if (dst_fid
>= ipa_get_cs_argument_count (top
))
3305 ipa_set_jf_unknown (dst
);
3309 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3311 if (src_ctx
&& !src_ctx
->useless_p ())
3313 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3315 /* TODO: Make type preserved safe WRT contexts. */
3316 if (!ipa_get_jf_ancestor_type_preserved (dst
))
3317 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3318 ctx
.offset_by (dst
->value
.ancestor
.offset
);
3319 if (!ctx
.useless_p ())
3323 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3325 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3328 dst_ctx
->combine_with (ctx
);
3332 /* Parameter and argument in ancestor jump function must be pointer
3333 type, which means access to aggregate must be by-reference. */
3334 gcc_assert (!src
->agg
.items
|| src
->agg
.by_ref
);
3336 if (src
->agg
.items
&& dst
->value
.ancestor
.agg_preserved
)
3338 struct ipa_agg_jf_item
*item
;
3341 /* Currently we do not produce clobber aggregate jump functions,
3342 replace with merging when we do. */
3343 gcc_assert (!dst
->agg
.items
);
3345 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3346 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3347 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
3348 item
->offset
-= dst
->value
.ancestor
.offset
;
3351 if (src
->type
== IPA_JF_PASS_THROUGH
3352 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3354 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
3355 dst
->value
.ancestor
.agg_preserved
&=
3356 src
->value
.pass_through
.agg_preserved
;
3358 else if (src
->type
== IPA_JF_ANCESTOR
)
3360 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
3361 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
3362 dst
->value
.ancestor
.agg_preserved
&=
3363 src
->value
.ancestor
.agg_preserved
;
3364 dst
->value
.ancestor
.keep_null
|= src
->value
.ancestor
.keep_null
;
3367 ipa_set_jf_unknown (dst
);
3369 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
3371 struct ipa_jump_func
*src
;
3372 /* We must check range due to calls with variable number of arguments
3373 and we cannot combine jump functions with operations. */
3374 if (dst
->value
.pass_through
.operation
== NOP_EXPR
3375 && (top
&& dst
->value
.pass_through
.formal_id
3376 < ipa_get_cs_argument_count (top
)))
3378 int dst_fid
= dst
->value
.pass_through
.formal_id
;
3379 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3380 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
3381 class ipa_polymorphic_call_context
*src_ctx
3382 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3384 if (src_ctx
&& !src_ctx
->useless_p ())
3386 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3388 /* TODO: Make type preserved safe WRT contexts. */
3389 if (!ipa_get_jf_pass_through_type_preserved (dst
))
3390 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3391 if (!ctx
.useless_p ())
3395 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3397 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3399 dst_ctx
->combine_with (ctx
);
3404 case IPA_JF_UNKNOWN
:
3405 ipa_set_jf_unknown (dst
);
3409 bool rd
= ipa_get_jf_pass_through_refdesc_decremented (dst
);
3410 ipa_set_jf_cst_copy (dst
, src
);
3412 ipa_zap_jf_refdesc (dst
);
3417 case IPA_JF_PASS_THROUGH
:
3419 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
3420 enum tree_code operation
;
3421 operation
= ipa_get_jf_pass_through_operation (src
);
3423 if (operation
== NOP_EXPR
)
3427 && ipa_get_jf_pass_through_agg_preserved (src
);
3428 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
3430 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
3431 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
3434 tree operand
= ipa_get_jf_pass_through_operand (src
);
3435 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
3440 case IPA_JF_ANCESTOR
:
3444 && ipa_get_jf_ancestor_agg_preserved (src
);
3445 ipa_set_ancestor_jf (dst
,
3446 ipa_get_jf_ancestor_offset (src
),
3447 ipa_get_jf_ancestor_formal_id (src
),
3449 ipa_get_jf_ancestor_keep_null (src
));
3457 && (dst_agg_p
|| !src
->agg
.by_ref
))
3459 /* Currently we do not produce clobber aggregate jump
3460 functions, replace with merging when we do. */
3461 gcc_assert (!dst
->agg
.items
);
3463 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3464 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3468 ipa_set_jf_unknown (dst
);
3473 /* If TARGET is an addr_expr of a function declaration, make it the
3474 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3475 Otherwise, return NULL. */
3477 struct cgraph_edge
*
3478 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
3481 struct cgraph_node
*callee
;
3482 bool unreachable
= false;
3484 if (TREE_CODE (target
) == ADDR_EXPR
)
3485 target
= TREE_OPERAND (target
, 0);
3486 if (TREE_CODE (target
) != FUNCTION_DECL
)
3488 target
= canonicalize_constructor_val (target
, NULL
);
3489 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
3491 /* Member pointer call that goes through a VMT lookup. */
3492 if (ie
->indirect_info
->member_ptr
3493 /* Or if target is not an invariant expression and we do not
3494 know if it will evaulate to function at runtime.
3495 This can happen when folding through &VAR, where &VAR
3496 is IP invariant, but VAR itself is not.
3498 TODO: Revisit this when GCC 5 is branched. It seems that
3499 member_ptr check is not needed and that we may try to fold
3500 the expression and see if VAR is readonly. */
3501 || !is_gimple_ip_invariant (target
))
3503 if (dump_enabled_p ())
3505 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3506 "discovered direct call non-invariant %s\n",
3507 ie
->caller
->dump_name ());
3513 if (dump_enabled_p ())
3515 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3516 "discovered direct call to non-function in %s, "
3517 "making it __builtin_unreachable\n",
3518 ie
->caller
->dump_name ());
3521 target
= builtin_decl_unreachable ();
3522 callee
= cgraph_node::get_create (target
);
3526 callee
= cgraph_node::get (target
);
3529 callee
= cgraph_node::get (target
);
3531 /* Because may-edges are not explicitely represented and vtable may be external,
3532 we may create the first reference to the object in the unit. */
3533 if (!callee
|| callee
->inlined_to
)
3536 /* We are better to ensure we can refer to it.
3537 In the case of static functions we are out of luck, since we already
3538 removed its body. In the case of public functions we may or may
3539 not introduce the reference. */
3540 if (!canonicalize_constructor_val (target
, NULL
)
3541 || !TREE_PUBLIC (target
))
3544 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
3545 "(%s -> %s) but cannot refer to it. Giving up.\n",
3546 ie
->caller
->dump_name (),
3547 ie
->callee
->dump_name ());
3550 callee
= cgraph_node::get_create (target
);
3553 /* If the edge is already speculated. */
3554 if (speculative
&& ie
->speculative
)
3558 cgraph_edge
*e2
= ie
->speculative_call_for_target (callee
);
3562 fprintf (dump_file
, "ipa-prop: Discovered call to a "
3563 "speculative target (%s -> %s) but the call is "
3564 "already speculated to different target. "
3566 ie
->caller
->dump_name (), callee
->dump_name ());
3572 "ipa-prop: Discovered call to a speculative target "
3573 "(%s -> %s) this agree with previous speculation.\n",
3574 ie
->caller
->dump_name (), callee
->dump_name ());
3580 if (!dbg_cnt (devirt
))
3583 ipa_check_create_node_params ();
3585 /* We cannot make edges to inline clones. It is bug that someone removed
3586 the cgraph node too early. */
3587 gcc_assert (!callee
->inlined_to
);
3589 if (dump_file
&& !unreachable
)
3591 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
3592 "(%s -> %s), for stmt ",
3593 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
3594 speculative
? "speculative" : "known",
3595 ie
->caller
->dump_name (),
3596 callee
->dump_name ());
3598 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
3600 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
3602 if (dump_enabled_p ())
3604 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3605 "converting indirect call in %s to direct call to %s\n",
3606 ie
->caller
->dump_name (), callee
->dump_name ());
3610 struct cgraph_edge
*orig
= ie
;
3611 ie
= cgraph_edge::make_direct (ie
, callee
);
3612 /* If we resolved speculative edge the cost is already up to date
3613 for direct call (adjusted by inline_edge_duplication_hook). */
3616 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
3617 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
3618 - eni_size_weights
.call_cost
);
3619 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
3620 - eni_time_weights
.call_cost
);
3625 if (!callee
->can_be_discarded_p ())
3628 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
3632 /* make_speculative will update ie's cost to direct call cost. */
3633 ie
= ie
->make_speculative
3634 (callee
, ie
->count
.apply_scale (8, 10));
3640 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3641 CONSTRUCTOR and return it. Return NULL if the search fails for some
3645 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3647 tree type
= TREE_TYPE (constructor
);
3648 if (TREE_CODE (type
) != ARRAY_TYPE
3649 && TREE_CODE (type
) != RECORD_TYPE
)
3654 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3656 HOST_WIDE_INT elt_offset
;
3657 if (TREE_CODE (type
) == ARRAY_TYPE
)
3660 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3661 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3665 if (TREE_CODE (index
) == RANGE_EXPR
)
3666 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3668 off
= wi::to_offset (index
);
3669 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3671 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3672 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3673 off
= wi::sext (off
- wi::to_offset (low_bound
),
3674 TYPE_PRECISION (TREE_TYPE (index
)));
3676 off
*= wi::to_offset (unit_size
);
3677 /* ??? Handle more than just the first index of a
3681 off
= wi::to_offset (unit_size
) * ix
;
3683 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3684 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3686 elt_offset
= off
.to_shwi ();
3688 else if (TREE_CODE (type
) == RECORD_TYPE
)
3690 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3691 if (DECL_BIT_FIELD (index
))
3693 elt_offset
= int_bit_position (index
);
3698 if (elt_offset
> req_offset
)
3701 if (TREE_CODE (val
) == CONSTRUCTOR
)
3702 return find_constructor_constant_at_offset (val
,
3703 req_offset
- elt_offset
);
3705 if (elt_offset
== req_offset
3706 && is_gimple_reg_type (TREE_TYPE (val
))
3707 && is_gimple_ip_invariant (val
))
3713 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3714 invariant from a static constructor and if so, return it. Otherwise return
3718 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3722 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3724 scalar
= TREE_OPERAND (scalar
, 0);
3728 || !is_global_var (scalar
)
3729 || !TREE_READONLY (scalar
)
3730 || !DECL_INITIAL (scalar
)
3731 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3734 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3737 /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3738 is none. BY_REF specifies whether the value has to be passed by reference
3742 ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function
*agg_jfunc
,
3743 ipa_node_params
*src_info
,
3744 cgraph_node
*src_node
,
3745 HOST_WIDE_INT offset
, bool by_ref
)
3747 if (by_ref
!= agg_jfunc
->by_ref
)
3750 for (const ipa_agg_jf_item
&item
: agg_jfunc
->items
)
3751 if (item
.offset
== offset
)
3752 return ipa_agg_value_from_jfunc (src_info
, src_node
, &item
);
3757 /* Remove a reference to SYMBOL from the list of references of a node given by
3758 reference description RDESC. Return true if the reference has been
3759 successfully found and removed. */
3762 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
3764 struct ipa_ref
*to_del
;
3765 struct cgraph_edge
*origin
;
3770 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
3771 origin
->lto_stmt_uid
, IPA_REF_ADDR
);
3775 to_del
->remove_reference ();
3777 fprintf (dump_file
, "ipa-prop: Removed a reference from %s to %s.\n",
3778 origin
->caller
->dump_name (), symbol
->dump_name ());
3782 /* If JFUNC has a reference description with refcount different from
3783 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3784 NULL. JFUNC must be a constant jump function. */
3786 static struct ipa_cst_ref_desc
*
3787 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
3789 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
3790 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
3796 /* If the value of constant jump function JFUNC is an address of a function
3797 declaration, return the associated call graph node. Otherwise return
3800 static symtab_node
*
3801 symtab_node_for_jfunc (struct ipa_jump_func
*jfunc
)
3803 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
3804 tree cst
= ipa_get_jf_constant (jfunc
);
3805 if (TREE_CODE (cst
) != ADDR_EXPR
3806 || (TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
3807 && TREE_CODE (TREE_OPERAND (cst
, 0)) != VAR_DECL
))
3810 return symtab_node::get (TREE_OPERAND (cst
, 0));
3814 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3815 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3816 the edge specified in the rdesc. Return false if either the symbol or the
3817 reference could not be found, otherwise return true. */
3820 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
3822 struct ipa_cst_ref_desc
*rdesc
;
3823 if (jfunc
->type
== IPA_JF_CONST
3824 && (rdesc
= jfunc_rdesc_usable (jfunc
))
3825 && --rdesc
->refcount
== 0)
3827 symtab_node
*symbol
= symtab_node_for_jfunc (jfunc
);
3831 return remove_described_reference (symbol
, rdesc
);
3836 /* Try to find a destination for indirect edge IE that corresponds to a simple
3837 call or a call of a member function pointer and where the destination is a
3838 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3839 the type of the parameter to which the result of JFUNC is passed. If it can
3840 be determined, return the newly direct edge, otherwise return NULL.
3841 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3844 static struct cgraph_edge
*
3845 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3846 struct ipa_jump_func
*jfunc
, tree target_type
,
3847 struct cgraph_node
*new_root
,
3848 class ipa_node_params
*new_root_info
)
3850 struct cgraph_edge
*cs
;
3851 tree target
= NULL_TREE
;
3852 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3853 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3857 target
= ipa_find_agg_cst_from_init (scalar
, ie
->indirect_info
->offset
,
3858 ie
->indirect_info
->by_ref
);
3859 if (!target
&& ie
->indirect_info
->guaranteed_unmodified
)
3860 target
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3862 ie
->indirect_info
->offset
,
3863 ie
->indirect_info
->by_ref
);
3869 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3871 if (cs
&& !agg_contents
)
3874 gcc_checking_assert (cs
->callee
3876 || jfunc
->type
!= IPA_JF_CONST
3877 || !symtab_node_for_jfunc (jfunc
)
3878 || cs
->callee
== symtab_node_for_jfunc (jfunc
)));
3879 ok
= try_decrement_rdesc_refcount (jfunc
);
3880 gcc_checking_assert (ok
);
3886 /* Return the target to be used in cases of impossible devirtualization. IE
3887 and target (the latter can be NULL) are dumped when dumping is enabled. */
3890 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3896 "Type inconsistent devirtualization: %s->%s\n",
3897 ie
->caller
->dump_name (),
3898 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3901 "No devirtualization target in %s\n",
3902 ie
->caller
->dump_name ());
3904 tree new_target
= builtin_decl_unreachable ();
3905 cgraph_node::get_create (new_target
);
3909 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3910 call based on a formal parameter which is described by jump function JFUNC
3911 and if it can be determined, make it direct and return the direct edge.
3912 Otherwise, return NULL. CTX describes the polymorphic context that the
3913 parameter the call is based on brings along with it. NEW_ROOT and
3914 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3917 static struct cgraph_edge
*
3918 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3919 struct ipa_jump_func
*jfunc
,
3920 class ipa_polymorphic_call_context ctx
,
3921 struct cgraph_node
*new_root
,
3922 class ipa_node_params
*new_root_info
)
3925 bool speculative
= false;
3927 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3930 gcc_assert (!ie
->indirect_info
->by_ref
);
3932 /* Try to do lookup via known virtual table pointer value. */
3933 if (!ie
->indirect_info
->vptr_changed
3934 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3937 unsigned HOST_WIDE_INT offset
;
3939 if (jfunc
->type
== IPA_JF_CONST
)
3940 t
= ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc
),
3941 ie
->indirect_info
->offset
, true);
3943 t
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3945 ie
->indirect_info
->offset
, true);
3946 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3949 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3950 vtable
, offset
, &can_refer
);
3954 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
,
3955 BUILT_IN_UNREACHABLE_TRAP
)
3956 || !possible_polymorphic_call_target_p
3957 (ie
, cgraph_node::get (t
)))
3959 /* Do not speculate builtin_unreachable, it is stupid! */
3960 if (!ie
->indirect_info
->vptr_changed
)
3961 target
= ipa_impossible_devirt_target (ie
, target
);
3968 speculative
= ie
->indirect_info
->vptr_changed
;
3974 ipa_polymorphic_call_context
ie_context (ie
);
3975 vec
<cgraph_node
*>targets
;
3978 ctx
.offset_by (ie
->indirect_info
->offset
);
3979 if (ie
->indirect_info
->vptr_changed
)
3980 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3981 ie
->indirect_info
->otr_type
);
3982 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3983 targets
= possible_polymorphic_call_targets
3984 (ie
->indirect_info
->otr_type
,
3985 ie
->indirect_info
->otr_token
,
3987 if (final
&& targets
.length () <= 1)
3989 speculative
= false;
3990 if (targets
.length () == 1)
3991 target
= targets
[0]->decl
;
3993 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3995 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3996 && !ie
->speculative
&& ie
->maybe_hot_p ())
3999 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
4000 ie
->indirect_info
->otr_token
,
4001 ie
->indirect_info
->context
);
4011 if (!possible_polymorphic_call_target_p
4012 (ie
, cgraph_node::get_create (target
)))
4016 target
= ipa_impossible_devirt_target (ie
, target
);
4018 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
4024 /* Update the param called notes associated with NODE when CS is being inlined,
4025 assuming NODE is (potentially indirectly) inlined into CS->callee.
4026 Moreover, if the callee is discovered to be constant, create a new cgraph
4027 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
4028 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
4031 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
4032 struct cgraph_node
*node
,
4033 vec
<cgraph_edge
*> *new_edges
)
4035 class ipa_edge_args
*top
;
4036 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
4037 struct cgraph_node
*new_root
;
4038 class ipa_node_params
*new_root_info
, *inlined_node_info
;
4041 ipa_check_create_edge_args ();
4042 top
= ipa_edge_args_sum
->get (cs
);
4043 new_root
= cs
->caller
->inlined_to
4044 ? cs
->caller
->inlined_to
: cs
->caller
;
4045 new_root_info
= ipa_node_params_sum
->get (new_root
);
4046 inlined_node_info
= ipa_node_params_sum
->get (cs
->callee
->function_symbol ());
4048 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
4050 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
4051 struct ipa_jump_func
*jfunc
;
4054 next_ie
= ie
->next_callee
;
4056 if (ici
->param_index
== -1)
4059 /* We must check range due to calls with variable number of arguments: */
4060 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
4062 ici
->param_index
= -1;
4066 param_index
= ici
->param_index
;
4067 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
4069 auto_vec
<cgraph_node
*, 4> spec_targets
;
4070 if (ie
->speculative
)
4071 for (cgraph_edge
*direct
= ie
->first_speculative_call_target ();
4073 direct
= direct
->next_speculative_call_target ())
4074 spec_targets
.safe_push (direct
->callee
);
4076 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
4077 new_direct_edge
= NULL
;
4078 else if (ici
->polymorphic
)
4080 ipa_polymorphic_call_context ctx
;
4081 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
4082 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
,
4088 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
4089 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
4095 /* If speculation was removed, then we need to do nothing. */
4096 if (new_direct_edge
&& new_direct_edge
!= ie
4097 && spec_targets
.contains (new_direct_edge
->callee
))
4099 new_direct_edge
->indirect_inlining_edge
= 1;
4101 if (!new_direct_edge
->speculative
)
4104 else if (new_direct_edge
)
4106 new_direct_edge
->indirect_inlining_edge
= 1;
4109 new_edges
->safe_push (new_direct_edge
);
4112 /* If speculative edge was introduced we still need to update
4113 call info of the indirect edge. */
4114 if (!new_direct_edge
->speculative
)
4117 if (jfunc
->type
== IPA_JF_PASS_THROUGH
4118 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
4120 if (ici
->agg_contents
4121 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
4122 && !ici
->polymorphic
)
4123 ici
->param_index
= -1;
4126 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
4127 if (ici
->polymorphic
4128 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
4129 ici
->vptr_changed
= true;
4130 ipa_set_param_used_by_indirect_call (new_root_info
,
4131 ici
->param_index
, true);
4132 if (ici
->polymorphic
)
4133 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4134 ici
->param_index
, true);
4137 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
4139 if (ici
->agg_contents
4140 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
4141 && !ici
->polymorphic
)
4142 ici
->param_index
= -1;
4145 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
4146 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
4147 if (ici
->polymorphic
4148 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
4149 ici
->vptr_changed
= true;
4150 ipa_set_param_used_by_indirect_call (new_root_info
,
4151 ici
->param_index
, true);
4152 if (ici
->polymorphic
)
4153 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4154 ici
->param_index
, true);
4158 /* Either we can find a destination for this edge now or never. */
4159 ici
->param_index
= -1;
4165 /* Recursively traverse subtree of NODE (including node) made of inlined
4166 cgraph_edges when CS has been inlined and invoke
4167 update_indirect_edges_after_inlining on all nodes and
4168 update_jump_functions_after_inlining on all non-inlined edges that lead out
4169 of this subtree. Newly discovered indirect edges will be added to
4170 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4174 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
4175 struct cgraph_node
*node
,
4176 vec
<cgraph_edge
*> *new_edges
)
4178 struct cgraph_edge
*e
;
4181 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
4183 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4184 if (!e
->inline_failed
)
4185 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
4187 update_jump_functions_after_inlining (cs
, e
);
4188 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4189 update_jump_functions_after_inlining (cs
, e
);
4194 /* Combine two controlled uses counts as done during inlining. */
4197 combine_controlled_uses_counters (int c
, int d
)
4199 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
4200 return IPA_UNDESCRIBED_USE
;
4205 /* Propagate number of controlled users from CS->caleee to the new root of the
4206 tree of inlined nodes. */
4209 propagate_controlled_uses (struct cgraph_edge
*cs
)
4211 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4214 struct cgraph_node
*new_root
= cs
->caller
->inlined_to
4215 ? cs
->caller
->inlined_to
: cs
->caller
;
4216 ipa_node_params
*new_root_info
= ipa_node_params_sum
->get (new_root
);
4217 ipa_node_params
*old_root_info
= ipa_node_params_sum
->get (cs
->callee
);
4223 count
= MIN (ipa_get_cs_argument_count (args
),
4224 ipa_get_param_count (old_root_info
));
4225 for (i
= 0; i
< count
; i
++)
4227 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4228 struct ipa_cst_ref_desc
*rdesc
;
4230 if (jf
->type
== IPA_JF_PASS_THROUGH
4231 && !ipa_get_jf_pass_through_refdesc_decremented (jf
))
4234 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
4235 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
4236 d
= ipa_get_controlled_uses (old_root_info
, i
);
4238 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
4239 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
4240 c
= combine_controlled_uses_counters (c
, d
);
4241 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
4243 if (c
!= IPA_UNDESCRIBED_USE
)
4245 lderef
= (ipa_get_param_load_dereferenced (new_root_info
, src_idx
)
4246 || ipa_get_param_load_dereferenced (old_root_info
, i
));
4247 ipa_set_param_load_dereferenced (new_root_info
, src_idx
, lderef
);
4250 if (c
== 0 && !lderef
&& new_root_info
->ipcp_orig_node
)
4252 struct cgraph_node
*n
;
4253 struct ipa_ref
*ref
;
4254 tree t
= new_root_info
->known_csts
[src_idx
];
4256 if (t
&& TREE_CODE (t
) == ADDR_EXPR
4257 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
4258 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
4259 && (ref
= new_root
->find_reference (n
, NULL
, 0,
4263 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
4264 "reference from %s to %s.\n",
4265 new_root
->dump_name (),
4267 ref
->remove_reference ();
4271 else if (jf
->type
== IPA_JF_CONST
4272 && (rdesc
= jfunc_rdesc_usable (jf
)))
4274 int d
= ipa_get_controlled_uses (old_root_info
, i
);
4275 int c
= rdesc
->refcount
;
4276 tree cst
= ipa_get_jf_constant (jf
);
4277 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
4278 if (rdesc
->refcount
!= IPA_UNDESCRIBED_USE
4279 && ipa_get_param_load_dereferenced (old_root_info
, i
)
4280 && TREE_CODE (cst
) == ADDR_EXPR
4281 && VAR_P (TREE_OPERAND (cst
, 0)))
4283 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4284 new_root
->create_reference (n
, IPA_REF_LOAD
, NULL
);
4286 fprintf (dump_file
, "ipa-prop: Address IPA constant will reach "
4287 "a load so adding LOAD reference from %s to %s.\n",
4288 new_root
->dump_name (), n
->dump_name ());
4290 if (rdesc
->refcount
== 0)
4292 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
4293 && ((TREE_CODE (TREE_OPERAND (cst
, 0))
4295 || VAR_P (TREE_OPERAND (cst
, 0))));
4297 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4300 remove_described_reference (n
, rdesc
);
4301 cgraph_node
*clone
= cs
->caller
;
4302 while (clone
->inlined_to
4303 && clone
->ipcp_clone
4304 && clone
!= rdesc
->cs
->caller
)
4306 struct ipa_ref
*ref
;
4307 ref
= clone
->find_reference (n
, NULL
, 0, IPA_REF_ADDR
);
4311 fprintf (dump_file
, "ipa-prop: Removing "
4312 "cloning-created reference "
4314 clone
->dump_name (),
4316 ref
->remove_reference ();
4318 clone
= clone
->callers
->caller
;
4325 for (i
= ipa_get_param_count (old_root_info
);
4326 i
< ipa_get_cs_argument_count (args
);
4329 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4331 if (jf
->type
== IPA_JF_CONST
)
4333 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
4335 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
4337 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
4338 ipa_set_controlled_uses (new_root_info
,
4339 jf
->value
.pass_through
.formal_id
,
4340 IPA_UNDESCRIBED_USE
);
4344 /* Update jump functions and call note functions on inlining the call site CS.
4345 CS is expected to lead to a node already cloned by
4346 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4347 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
4351 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
4352 vec
<cgraph_edge
*> *new_edges
)
4355 /* Do nothing if the preparation phase has not been carried out yet
4356 (i.e. during early inlining). */
4357 if (!ipa_node_params_sum
)
4359 gcc_assert (ipa_edge_args_sum
);
4361 propagate_controlled_uses (cs
);
4362 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
4363 ipa_node_params_sum
->remove (cs
->callee
);
4365 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4369 if (args
->jump_functions
)
4371 struct ipa_jump_func
*jf
;
4373 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4374 if (jf
->type
== IPA_JF_CONST
4375 && ipa_get_jf_constant_rdesc (jf
))
4382 ipa_edge_args_sum
->remove (cs
);
4384 if (ipcp_transformation_sum
)
4385 ipcp_transformation_sum
->remove (cs
->callee
);
4390 /* Ensure that array of edge arguments infos is big enough to accommodate a
4391 structure for all edges and reallocates it if not. Also, allocate
4392 associated hash tables is they do not already exist. */
4395 ipa_check_create_edge_args (void)
4397 if (!ipa_edge_args_sum
)
4399 = (new (ggc_alloc_no_dtor
<ipa_edge_args_sum_t
> ())
4400 ipa_edge_args_sum_t (symtab
, true));
4401 if (!ipa_bits_hash_table
)
4402 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4403 if (!ipa_vr_hash_table
)
4404 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4407 /* Free all ipa_edge structures. */
4410 ipa_free_all_edge_args (void)
4412 if (!ipa_edge_args_sum
)
4415 ggc_delete (ipa_edge_args_sum
);
4416 ipa_edge_args_sum
= NULL
;
4419 /* Free all ipa_node_params structures. */
4422 ipa_free_all_node_params (void)
4424 if (ipa_node_params_sum
)
4425 ggc_delete (ipa_node_params_sum
);
4426 ipa_node_params_sum
= NULL
;
4429 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4430 tables if they do not already exist. */
4433 ipcp_transformation_initialize (void)
4435 if (!ipa_bits_hash_table
)
4436 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4437 if (!ipa_vr_hash_table
)
4438 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4439 if (ipcp_transformation_sum
== NULL
)
4441 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
4442 ipcp_transformation_sum
->disable_insertion_hook ();
4446 /* Release the IPA CP transformation summary. */
4449 ipcp_free_transformation_sum (void)
4451 if (!ipcp_transformation_sum
)
4454 ipcp_transformation_sum
->~function_summary
<ipcp_transformation
*> ();
4455 ggc_free (ipcp_transformation_sum
);
4456 ipcp_transformation_sum
= NULL
;
4459 /* Set the aggregate replacements of NODE to be AGGVALS. */
4462 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
4463 vec
<ipa_argagg_value
, va_gc
> *aggs
)
4465 ipcp_transformation_initialize ();
4466 ipcp_transformation
*s
= ipcp_transformation_sum
->get_create (node
);
4467 s
->m_agg_values
= aggs
;
4470 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4471 count data structures accordingly. */
4474 ipa_edge_args_sum_t::remove (cgraph_edge
*cs
, ipa_edge_args
*args
)
4476 if (args
->jump_functions
)
4478 struct ipa_jump_func
*jf
;
4480 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4482 struct ipa_cst_ref_desc
*rdesc
;
4483 try_decrement_rdesc_refcount (jf
);
4484 if (jf
->type
== IPA_JF_CONST
4485 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
4492 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4493 reference count data strucutres accordingly. */
4496 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
4497 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
4501 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
4502 if (old_args
->polymorphic_call_contexts
)
4503 new_args
->polymorphic_call_contexts
4504 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
4506 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
4508 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
4509 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
4511 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
4513 if (src_jf
->type
== IPA_JF_CONST
)
4515 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
4518 dst_jf
->value
.constant
.rdesc
= NULL
;
4519 else if (src
->caller
== dst
->caller
)
4521 /* Creation of a speculative edge. If the source edge is the one
4522 grabbing a reference, we must create a new (duplicate)
4523 reference description. Otherwise they refer to the same
4524 description corresponding to a reference taken in a function
4525 src->caller is inlined to. In that case we just must
4526 increment the refcount. */
4527 if (src_rdesc
->cs
== src
)
4529 symtab_node
*n
= symtab_node_for_jfunc (src_jf
);
4530 gcc_checking_assert (n
);
4532 = src
->caller
->find_reference (n
, src
->call_stmt
,
4535 gcc_checking_assert (ref
);
4536 dst
->caller
->clone_reference (ref
, ref
->stmt
);
4538 ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4539 dst_rdesc
->cs
= dst
;
4540 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4541 dst_rdesc
->next_duplicate
= NULL
;
4542 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4546 src_rdesc
->refcount
++;
4547 dst_jf
->value
.constant
.rdesc
= src_rdesc
;
4550 else if (src_rdesc
->cs
== src
)
4552 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4553 dst_rdesc
->cs
= dst
;
4554 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4555 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
4556 src_rdesc
->next_duplicate
= dst_rdesc
;
4557 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4561 struct ipa_cst_ref_desc
*dst_rdesc
;
4562 /* This can happen during inlining, when a JFUNC can refer to a
4563 reference taken in a function up in the tree of inline clones.
4564 We need to find the duplicate that refers to our tree of
4567 gcc_assert (dst
->caller
->inlined_to
);
4568 for (dst_rdesc
= src_rdesc
->next_duplicate
;
4570 dst_rdesc
= dst_rdesc
->next_duplicate
)
4572 struct cgraph_node
*top
;
4573 top
= dst_rdesc
->cs
->caller
->inlined_to
4574 ? dst_rdesc
->cs
->caller
->inlined_to
4575 : dst_rdesc
->cs
->caller
;
4576 if (dst
->caller
->inlined_to
== top
)
4579 gcc_assert (dst_rdesc
);
4580 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4583 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
4584 && src
->caller
== dst
->caller
)
4586 struct cgraph_node
*inline_root
= dst
->caller
->inlined_to
4587 ? dst
->caller
->inlined_to
: dst
->caller
;
4588 ipa_node_params
*root_info
= ipa_node_params_sum
->get (inline_root
);
4589 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
4591 int c
= ipa_get_controlled_uses (root_info
, idx
);
4592 if (c
!= IPA_UNDESCRIBED_USE
)
4595 ipa_set_controlled_uses (root_info
, idx
, c
);
4601 /* Analyze newly added function into callgraph. */
4604 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
4606 if (node
->has_gimple_body_p ())
4607 ipa_analyze_node (node
);
4610 /* Hook that is called by summary when a node is duplicated. */
4613 ipa_node_params_t::duplicate(cgraph_node
*, cgraph_node
*,
4614 ipa_node_params
*old_info
,
4615 ipa_node_params
*new_info
)
4617 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
4618 new_info
->lattices
= NULL
;
4619 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
4620 new_info
->known_csts
= old_info
->known_csts
.copy ();
4621 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
4623 new_info
->analysis_done
= old_info
->analysis_done
;
4624 new_info
->node_enqueued
= old_info
->node_enqueued
;
4625 new_info
->versionable
= old_info
->versionable
;
4628 /* Duplication of ipcp transformation summaries. */
4631 ipcp_transformation_t::duplicate(cgraph_node
*, cgraph_node
*dst
,
4632 ipcp_transformation
*src_trans
,
4633 ipcp_transformation
*dst_trans
)
4635 /* Avoid redundant work of duplicating vectors we will never use. */
4636 if (dst
->inlined_to
)
4638 dst_trans
->m_agg_values
= vec_safe_copy (src_trans
->m_agg_values
);
4639 dst_trans
->bits
= vec_safe_copy (src_trans
->bits
);
4640 dst_trans
->m_vr
= vec_safe_copy (src_trans
->m_vr
);
4643 /* Register our cgraph hooks if they are not already there. */
4646 ipa_register_cgraph_hooks (void)
4648 ipa_check_create_node_params ();
4649 ipa_check_create_edge_args ();
4651 function_insertion_hook_holder
=
4652 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
4655 /* Unregister our cgraph hooks if they are not already there. */
4658 ipa_unregister_cgraph_hooks (void)
4660 if (function_insertion_hook_holder
)
4661 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
4662 function_insertion_hook_holder
= NULL
;
4665 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4666 longer needed after ipa-cp. */
4669 ipa_free_all_structures_after_ipa_cp (void)
4671 if (!optimize
&& !in_lto_p
)
4673 ipa_free_all_edge_args ();
4674 ipa_free_all_node_params ();
4675 ipcp_sources_pool
.release ();
4676 ipcp_cst_values_pool
.release ();
4677 ipcp_poly_ctx_values_pool
.release ();
4678 ipcp_agg_lattice_pool
.release ();
4679 ipa_unregister_cgraph_hooks ();
4680 ipa_refdesc_pool
.release ();
4684 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4685 longer needed after indirect inlining. */
4688 ipa_free_all_structures_after_iinln (void)
4690 ipa_free_all_edge_args ();
4691 ipa_free_all_node_params ();
4692 ipa_unregister_cgraph_hooks ();
4693 ipcp_sources_pool
.release ();
4694 ipcp_cst_values_pool
.release ();
4695 ipcp_poly_ctx_values_pool
.release ();
4696 ipcp_agg_lattice_pool
.release ();
4697 ipa_refdesc_pool
.release ();
4700 /* Print ipa_tree_map data structures of all functions in the
4704 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
4707 class ipa_node_params
*info
;
4709 if (!node
->definition
)
4711 info
= ipa_node_params_sum
->get (node
);
4712 fprintf (f
, " function %s parameter descriptors:\n", node
->dump_name ());
4715 fprintf (f
, " no params return\n");
4718 count
= ipa_get_param_count (info
);
4719 for (i
= 0; i
< count
; i
++)
4724 ipa_dump_param (f
, info
, i
);
4725 if (ipa_is_param_used (info
, i
))
4726 fprintf (f
, " used");
4727 if (ipa_is_param_used_by_ipa_predicates (info
, i
))
4728 fprintf (f
, " used_by_ipa_predicates");
4729 if (ipa_is_param_used_by_indirect_call (info
, i
))
4730 fprintf (f
, " used_by_indirect_call");
4731 if (ipa_is_param_used_by_polymorphic_call (info
, i
))
4732 fprintf (f
, " used_by_polymorphic_call");
4733 c
= ipa_get_controlled_uses (info
, i
);
4734 if (c
== IPA_UNDESCRIBED_USE
)
4735 fprintf (f
, " undescribed_use");
4737 fprintf (f
, " controlled_uses=%i %s", c
,
4738 ipa_get_param_load_dereferenced (info
, i
)
4739 ? "(load_dereferenced)" : "");
4744 /* Print ipa_tree_map data structures of all functions in the
4748 ipa_print_all_params (FILE * f
)
4750 struct cgraph_node
*node
;
4752 fprintf (f
, "\nFunction parameters:\n");
4753 FOR_EACH_FUNCTION (node
)
4754 ipa_print_node_params (f
, node
);
4757 /* Stream out jump function JUMP_FUNC to OB. */
4760 ipa_write_jump_function (struct output_block
*ob
,
4761 struct ipa_jump_func
*jump_func
)
4763 struct ipa_agg_jf_item
*item
;
4764 struct bitpack_d bp
;
4768 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4769 as well as WPA memory by handling them specially. */
4770 if (jump_func
->type
== IPA_JF_CONST
4771 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4774 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4775 switch (jump_func
->type
)
4777 case IPA_JF_UNKNOWN
:
4781 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4782 stream_write_tree (ob
,
4784 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4785 : jump_func
->value
.constant
.value
, true);
4787 case IPA_JF_PASS_THROUGH
:
4788 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4789 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4791 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4792 bp
= bitpack_create (ob
->main_stream
);
4793 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4794 gcc_assert (!jump_func
->value
.pass_through
.refdesc_decremented
);
4795 streamer_write_bitpack (&bp
);
4797 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4799 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4802 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4803 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4806 case IPA_JF_ANCESTOR
:
4807 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4808 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4809 bp
= bitpack_create (ob
->main_stream
);
4810 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4811 bp_pack_value (&bp
, jump_func
->value
.ancestor
.keep_null
, 1);
4812 streamer_write_bitpack (&bp
);
4815 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4818 count
= vec_safe_length (jump_func
->agg
.items
);
4819 streamer_write_uhwi (ob
, count
);
4822 bp
= bitpack_create (ob
->main_stream
);
4823 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4824 streamer_write_bitpack (&bp
);
4827 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4829 stream_write_tree (ob
, item
->type
, true);
4830 streamer_write_uhwi (ob
, item
->offset
);
4831 streamer_write_uhwi (ob
, item
->jftype
);
4832 switch (item
->jftype
)
4834 case IPA_JF_UNKNOWN
:
4837 stream_write_tree (ob
, item
->value
.constant
, true);
4839 case IPA_JF_PASS_THROUGH
:
4840 case IPA_JF_LOAD_AGG
:
4841 streamer_write_uhwi (ob
, item
->value
.pass_through
.operation
);
4842 streamer_write_uhwi (ob
, item
->value
.pass_through
.formal_id
);
4843 if (TREE_CODE_CLASS (item
->value
.pass_through
.operation
)
4845 stream_write_tree (ob
, item
->value
.pass_through
.operand
, true);
4846 if (item
->jftype
== IPA_JF_LOAD_AGG
)
4848 stream_write_tree (ob
, item
->value
.load_agg
.type
, true);
4849 streamer_write_uhwi (ob
, item
->value
.load_agg
.offset
);
4850 bp
= bitpack_create (ob
->main_stream
);
4851 bp_pack_value (&bp
, item
->value
.load_agg
.by_ref
, 1);
4852 streamer_write_bitpack (&bp
);
4856 fatal_error (UNKNOWN_LOCATION
,
4857 "invalid jump function in LTO stream");
4861 bp
= bitpack_create (ob
->main_stream
);
4862 bp_pack_value (&bp
, !!jump_func
->bits
, 1);
4863 streamer_write_bitpack (&bp
);
4864 if (jump_func
->bits
)
4866 streamer_write_widest_int (ob
, jump_func
->bits
->value
);
4867 streamer_write_widest_int (ob
, jump_func
->bits
->mask
);
4869 if (jump_func
->m_vr
)
4870 jump_func
->m_vr
->streamer_write (ob
);
4873 bp_pack_value (&bp
, false, 1);
4874 streamer_write_bitpack (&bp
);
4878 /* Read in jump function JUMP_FUNC from IB. */
4881 ipa_read_jump_function (class lto_input_block
*ib
,
4882 struct ipa_jump_func
*jump_func
,
4883 struct cgraph_edge
*cs
,
4884 class data_in
*data_in
,
4887 enum jump_func_type jftype
;
4888 enum tree_code operation
;
4890 int val
= streamer_read_uhwi (ib
);
4891 bool flag
= val
& 1;
4893 jftype
= (enum jump_func_type
) (val
/ 2);
4896 case IPA_JF_UNKNOWN
:
4897 ipa_set_jf_unknown (jump_func
);
4901 tree t
= stream_read_tree (ib
, data_in
);
4902 if (flag
&& prevails
)
4903 t
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (t
)), t
);
4904 ipa_set_jf_constant (jump_func
, t
, cs
);
4907 case IPA_JF_PASS_THROUGH
:
4908 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4909 if (operation
== NOP_EXPR
)
4911 int formal_id
= streamer_read_uhwi (ib
);
4912 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4913 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4914 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4916 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4918 int formal_id
= streamer_read_uhwi (ib
);
4919 ipa_set_jf_unary_pass_through (jump_func
, formal_id
, operation
);
4923 tree operand
= stream_read_tree (ib
, data_in
);
4924 int formal_id
= streamer_read_uhwi (ib
);
4925 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4929 case IPA_JF_ANCESTOR
:
4931 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4932 int formal_id
= streamer_read_uhwi (ib
);
4933 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4934 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4935 bool keep_null
= bp_unpack_value (&bp
, 1);
4936 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
,
4941 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4944 count
= streamer_read_uhwi (ib
);
4947 jump_func
->agg
.items
= NULL
;
4948 vec_safe_reserve (jump_func
->agg
.items
, count
, true);
4952 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4953 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4955 for (i
= 0; i
< count
; i
++)
4957 struct ipa_agg_jf_item item
;
4958 item
.type
= stream_read_tree (ib
, data_in
);
4959 item
.offset
= streamer_read_uhwi (ib
);
4960 item
.jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4962 switch (item
.jftype
)
4964 case IPA_JF_UNKNOWN
:
4967 item
.value
.constant
= stream_read_tree (ib
, data_in
);
4969 case IPA_JF_PASS_THROUGH
:
4970 case IPA_JF_LOAD_AGG
:
4971 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4972 item
.value
.pass_through
.operation
= operation
;
4973 item
.value
.pass_through
.formal_id
= streamer_read_uhwi (ib
);
4974 if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4975 item
.value
.pass_through
.operand
= NULL_TREE
;
4977 item
.value
.pass_through
.operand
= stream_read_tree (ib
, data_in
);
4978 if (item
.jftype
== IPA_JF_LOAD_AGG
)
4980 struct bitpack_d bp
;
4981 item
.value
.load_agg
.type
= stream_read_tree (ib
, data_in
);
4982 item
.value
.load_agg
.offset
= streamer_read_uhwi (ib
);
4983 bp
= streamer_read_bitpack (ib
);
4984 item
.value
.load_agg
.by_ref
= bp_unpack_value (&bp
, 1);
4988 fatal_error (UNKNOWN_LOCATION
,
4989 "invalid jump function in LTO stream");
4992 jump_func
->agg
.items
->quick_push (item
);
4995 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4996 bool bits_known
= bp_unpack_value (&bp
, 1);
4999 widest_int value
= streamer_read_widest_int (ib
);
5000 widest_int mask
= streamer_read_widest_int (ib
);
5002 ipa_set_jfunc_bits (jump_func
, value
, mask
);
5005 jump_func
->bits
= NULL
;
5008 vr
.streamer_read (ib
, data_in
);
5012 ipa_set_jfunc_vr (jump_func
, vr
);
5015 jump_func
->m_vr
= NULL
;
5018 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
5019 relevant to indirect inlining to OB. */
5022 ipa_write_indirect_edge_info (struct output_block
*ob
,
5023 struct cgraph_edge
*cs
)
5025 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
5026 struct bitpack_d bp
;
5028 streamer_write_hwi (ob
, ii
->param_index
);
5029 bp
= bitpack_create (ob
->main_stream
);
5030 bp_pack_value (&bp
, ii
->polymorphic
, 1);
5031 bp_pack_value (&bp
, ii
->agg_contents
, 1);
5032 bp_pack_value (&bp
, ii
->member_ptr
, 1);
5033 bp_pack_value (&bp
, ii
->by_ref
, 1);
5034 bp_pack_value (&bp
, ii
->guaranteed_unmodified
, 1);
5035 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
5036 streamer_write_bitpack (&bp
);
5037 if (ii
->agg_contents
|| ii
->polymorphic
)
5038 streamer_write_hwi (ob
, ii
->offset
);
5040 gcc_assert (ii
->offset
== 0);
5042 if (ii
->polymorphic
)
5044 streamer_write_hwi (ob
, ii
->otr_token
);
5045 stream_write_tree (ob
, ii
->otr_type
, true);
5046 ii
->context
.stream_out (ob
);
5050 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5051 relevant to indirect inlining from IB. */
5054 ipa_read_indirect_edge_info (class lto_input_block
*ib
,
5055 class data_in
*data_in
,
5056 struct cgraph_edge
*cs
,
5057 class ipa_node_params
*info
)
5059 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
5060 struct bitpack_d bp
;
5062 ii
->param_index
= (int) streamer_read_hwi (ib
);
5063 bp
= streamer_read_bitpack (ib
);
5064 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
5065 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
5066 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
5067 ii
->by_ref
= bp_unpack_value (&bp
, 1);
5068 ii
->guaranteed_unmodified
= bp_unpack_value (&bp
, 1);
5069 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
5070 if (ii
->agg_contents
|| ii
->polymorphic
)
5071 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
5074 if (ii
->polymorphic
)
5076 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
5077 ii
->otr_type
= stream_read_tree (ib
, data_in
);
5078 ii
->context
.stream_in (ib
, data_in
);
5080 if (info
&& ii
->param_index
>= 0)
5082 if (ii
->polymorphic
)
5083 ipa_set_param_used_by_polymorphic_call (info
,
5084 ii
->param_index
, true);
5085 ipa_set_param_used_by_indirect_call (info
,
5086 ii
->param_index
, true);
5090 /* Stream out NODE info to OB. */
5093 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
5096 lto_symtab_encoder_t encoder
;
5097 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
5099 struct cgraph_edge
*e
;
5100 struct bitpack_d bp
;
5102 encoder
= ob
->decl_state
->symtab_node_encoder
;
5103 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
5104 streamer_write_uhwi (ob
, node_ref
);
5106 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
5107 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5108 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
5109 bp
= bitpack_create (ob
->main_stream
);
5110 gcc_assert (info
->analysis_done
5111 || ipa_get_param_count (info
) == 0);
5112 gcc_assert (!info
->node_enqueued
);
5113 gcc_assert (!info
->ipcp_orig_node
);
5114 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5116 /* TODO: We could just not stream the bit in the undescribed case. */
5117 bool d
= (ipa_get_controlled_uses (info
, j
) != IPA_UNDESCRIBED_USE
)
5118 ? ipa_get_param_load_dereferenced (info
, j
) : true;
5119 bp_pack_value (&bp
, d
, 1);
5120 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
5122 streamer_write_bitpack (&bp
);
5123 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5125 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
5126 stream_write_tree (ob
, ipa_get_type (info
, j
), true);
5128 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5130 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5134 streamer_write_uhwi (ob
, 0);
5138 streamer_write_uhwi (ob
,
5139 ipa_get_cs_argument_count (args
) * 2
5140 + (args
->polymorphic_call_contexts
!= NULL
));
5141 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5143 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5144 if (args
->polymorphic_call_contexts
!= NULL
)
5145 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5148 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5150 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5152 streamer_write_uhwi (ob
, 0);
5155 streamer_write_uhwi (ob
,
5156 ipa_get_cs_argument_count (args
) * 2
5157 + (args
->polymorphic_call_contexts
!= NULL
));
5158 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5160 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5161 if (args
->polymorphic_call_contexts
!= NULL
)
5162 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5165 ipa_write_indirect_edge_info (ob
, e
);
5169 /* Stream in edge E from IB. */
5172 ipa_read_edge_info (class lto_input_block
*ib
,
5173 class data_in
*data_in
,
5174 struct cgraph_edge
*e
, bool prevails
)
5176 int count
= streamer_read_uhwi (ib
);
5177 bool contexts_computed
= count
& 1;
5183 && (e
->possibly_call_in_translation_unit_p ()
5184 /* Also stream in jump functions to builtins in hope that they
5185 will get fnspecs. */
5186 || fndecl_built_in_p (e
->callee
->decl
, BUILT_IN_NORMAL
)))
5188 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (e
);
5189 vec_safe_grow_cleared (args
->jump_functions
, count
, true);
5190 if (contexts_computed
)
5191 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
, true);
5192 for (int k
= 0; k
< count
; k
++)
5194 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
5196 if (contexts_computed
)
5197 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in
5203 for (int k
= 0; k
< count
; k
++)
5205 struct ipa_jump_func dummy
;
5206 ipa_read_jump_function (ib
, &dummy
, e
,
5208 if (contexts_computed
)
5210 class ipa_polymorphic_call_context ctx
;
5211 ctx
.stream_in (ib
, data_in
);
5217 /* Stream in NODE info from IB. */
5220 ipa_read_node_info (class lto_input_block
*ib
, struct cgraph_node
*node
,
5221 class data_in
*data_in
)
5224 struct cgraph_edge
*e
;
5225 struct bitpack_d bp
;
5226 bool prevails
= node
->prevailing_p ();
5227 ipa_node_params
*info
5228 = prevails
? ipa_node_params_sum
->get_create (node
) : NULL
;
5230 int param_count
= streamer_read_uhwi (ib
);
5233 ipa_alloc_node_params (node
, param_count
);
5234 for (k
= 0; k
< param_count
; k
++)
5235 (*info
->descriptors
)[k
].move_cost
= streamer_read_uhwi (ib
);
5236 if (ipa_get_param_count (info
) != 0)
5237 info
->analysis_done
= true;
5238 info
->node_enqueued
= false;
5241 for (k
= 0; k
< param_count
; k
++)
5242 streamer_read_uhwi (ib
);
5244 bp
= streamer_read_bitpack (ib
);
5245 for (k
= 0; k
< param_count
; k
++)
5247 bool load_dereferenced
= bp_unpack_value (&bp
, 1);
5248 bool used
= bp_unpack_value (&bp
, 1);
5252 ipa_set_param_load_dereferenced (info
, k
, load_dereferenced
);
5253 ipa_set_param_used (info
, k
, used
);
5256 for (k
= 0; k
< param_count
; k
++)
5258 int nuses
= streamer_read_hwi (ib
);
5259 tree type
= stream_read_tree (ib
, data_in
);
5263 ipa_set_controlled_uses (info
, k
, nuses
);
5264 (*info
->descriptors
)[k
].decl_or_type
= type
;
5267 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5268 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5269 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5271 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5272 ipa_read_indirect_edge_info (ib
, data_in
, e
, info
);
5276 /* Write jump functions for nodes in SET. */
5279 ipa_prop_write_jump_functions (void)
5281 struct output_block
*ob
;
5282 unsigned int count
= 0;
5283 lto_symtab_encoder_iterator lsei
;
5284 lto_symtab_encoder_t encoder
;
5286 if (!ipa_node_params_sum
|| !ipa_edge_args_sum
)
5289 ob
= create_output_block (LTO_section_jump_functions
);
5290 encoder
= ob
->decl_state
->symtab_node_encoder
;
5292 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5293 lsei_next_function_in_partition (&lsei
))
5295 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5296 if (node
->has_gimple_body_p ()
5297 && ipa_node_params_sum
->get (node
) != NULL
)
5301 streamer_write_uhwi (ob
, count
);
5303 /* Process all of the functions. */
5304 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5305 lsei_next_function_in_partition (&lsei
))
5307 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5308 if (node
->has_gimple_body_p ()
5309 && ipa_node_params_sum
->get (node
) != NULL
)
5310 ipa_write_node_info (ob
, node
);
5312 streamer_write_char_stream (ob
->main_stream
, 0);
5313 produce_asm (ob
, NULL
);
5314 destroy_output_block (ob
);
5317 /* Read section in file FILE_DATA of length LEN with data DATA. */
5320 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
5323 const struct lto_function_header
*header
=
5324 (const struct lto_function_header
*) data
;
5325 const int cfg_offset
= sizeof (struct lto_function_header
);
5326 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5327 const int string_offset
= main_offset
+ header
->main_size
;
5328 class data_in
*data_in
;
5332 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5333 header
->main_size
, file_data
);
5336 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5337 header
->string_size
, vNULL
);
5338 count
= streamer_read_uhwi (&ib_main
);
5340 for (i
= 0; i
< count
; i
++)
5343 struct cgraph_node
*node
;
5344 lto_symtab_encoder_t encoder
;
5346 index
= streamer_read_uhwi (&ib_main
);
5347 encoder
= file_data
->symtab_node_encoder
;
5348 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5350 gcc_assert (node
->definition
);
5351 ipa_read_node_info (&ib_main
, node
, data_in
);
5353 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5355 lto_data_in_delete (data_in
);
5358 /* Read ipcp jump functions. */
5361 ipa_prop_read_jump_functions (void)
5363 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5364 struct lto_file_decl_data
*file_data
;
5367 ipa_check_create_node_params ();
5368 ipa_check_create_edge_args ();
5369 ipa_register_cgraph_hooks ();
5371 while ((file_data
= file_data_vec
[j
++]))
5375 = lto_get_summary_section_data (file_data
, LTO_section_jump_functions
,
5378 ipa_prop_read_section (file_data
, data
, len
);
5382 /* Return true if the IPA-CP transformation summary TS is non-NULL and contains
5385 useful_ipcp_transformation_info_p (ipcp_transformation
*ts
)
5389 if (!vec_safe_is_empty (ts
->m_agg_values
)
5390 || !vec_safe_is_empty (ts
->bits
)
5391 || !vec_safe_is_empty (ts
->m_vr
))
5396 /* Write into OB IPA-CP transfromation summary TS describing NODE. */
5399 write_ipcp_transformation_info (output_block
*ob
, cgraph_node
*node
,
5400 ipcp_transformation
*ts
)
5402 lto_symtab_encoder_t encoder
= ob
->decl_state
->symtab_node_encoder
;
5403 int node_ref
= lto_symtab_encoder_encode (encoder
, node
);
5404 streamer_write_uhwi (ob
, node_ref
);
5406 streamer_write_uhwi (ob
, vec_safe_length (ts
->m_agg_values
));
5407 for (const ipa_argagg_value
&av
: ts
->m_agg_values
)
5409 struct bitpack_d bp
;
5411 stream_write_tree (ob
, av
.value
, true);
5412 streamer_write_uhwi (ob
, av
.unit_offset
);
5413 streamer_write_uhwi (ob
, av
.index
);
5415 bp
= bitpack_create (ob
->main_stream
);
5416 bp_pack_value (&bp
, av
.by_ref
, 1);
5417 streamer_write_bitpack (&bp
);
5420 streamer_write_uhwi (ob
, vec_safe_length (ts
->m_vr
));
5421 for (const ipa_vr
&parm_vr
: ts
->m_vr
)
5422 parm_vr
.streamer_write (ob
);
5424 streamer_write_uhwi (ob
, vec_safe_length (ts
->bits
));
5425 for (const ipa_bits
*bits_jfunc
: ts
->bits
)
5427 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
5428 bp_pack_value (&bp
, !!bits_jfunc
, 1);
5429 streamer_write_bitpack (&bp
);
5432 streamer_write_widest_int (ob
, bits_jfunc
->value
);
5433 streamer_write_widest_int (ob
, bits_jfunc
->mask
);
5438 /* Stream in the aggregate value replacement chain for NODE from IB. */
5441 read_ipcp_transformation_info (lto_input_block
*ib
, cgraph_node
*node
,
5444 unsigned int count
, i
;
5445 ipcp_transformation_initialize ();
5446 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
5448 count
= streamer_read_uhwi (ib
);
5451 vec_safe_grow_cleared (ts
->m_agg_values
, count
, true);
5452 for (i
= 0; i
<count
; i
++)
5454 ipa_argagg_value
*av
= &(*ts
->m_agg_values
)[i
];;
5456 av
->value
= stream_read_tree (ib
, data_in
);
5457 av
->unit_offset
= streamer_read_uhwi (ib
);
5458 av
->index
= streamer_read_uhwi (ib
);
5460 bitpack_d bp
= streamer_read_bitpack (ib
);
5461 av
->by_ref
= bp_unpack_value (&bp
, 1);
5465 count
= streamer_read_uhwi (ib
);
5468 vec_safe_grow_cleared (ts
->m_vr
, count
, true);
5469 for (i
= 0; i
< count
; i
++)
5472 parm_vr
= &(*ts
->m_vr
)[i
];
5473 parm_vr
->streamer_read (ib
, data_in
);
5476 count
= streamer_read_uhwi (ib
);
5479 vec_safe_grow_cleared (ts
->bits
, count
, true);
5480 for (i
= 0; i
< count
; i
++)
5482 struct bitpack_d bp
= streamer_read_bitpack (ib
);
5483 bool known
= bp_unpack_value (&bp
, 1);
5486 const widest_int value
= streamer_read_widest_int (ib
);
5487 const widest_int mask
= streamer_read_widest_int (ib
);
5489 = ipa_get_ipa_bits_for_value (value
, mask
);
5490 (*ts
->bits
)[i
] = bits
;
5496 /* Write all aggregate replacement for nodes in set. */
5499 ipcp_write_transformation_summaries (void)
5501 struct output_block
*ob
;
5502 unsigned int count
= 0;
5503 lto_symtab_encoder_t encoder
;
5505 ob
= create_output_block (LTO_section_ipcp_transform
);
5506 encoder
= ob
->decl_state
->symtab_node_encoder
;
5509 for (int i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
5511 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
5512 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
5515 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5516 if (useful_ipcp_transformation_info_p (ts
)
5517 && lto_symtab_encoder_encode_body_p (encoder
, cnode
))
5521 streamer_write_uhwi (ob
, count
);
5523 for (int i
= 0; i
< lto_symtab_encoder_size (encoder
); i
++)
5525 symtab_node
*snode
= lto_symtab_encoder_deref (encoder
, i
);
5526 cgraph_node
*cnode
= dyn_cast
<cgraph_node
*> (snode
);
5529 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5530 if (useful_ipcp_transformation_info_p (ts
)
5531 && lto_symtab_encoder_encode_body_p (encoder
, cnode
))
5532 write_ipcp_transformation_info (ob
, cnode
, ts
);
5534 streamer_write_char_stream (ob
->main_stream
, 0);
5535 produce_asm (ob
, NULL
);
5536 destroy_output_block (ob
);
5539 /* Read replacements section in file FILE_DATA of length LEN with data
5543 read_replacements_section (struct lto_file_decl_data
*file_data
,
5547 const struct lto_function_header
*header
=
5548 (const struct lto_function_header
*) data
;
5549 const int cfg_offset
= sizeof (struct lto_function_header
);
5550 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5551 const int string_offset
= main_offset
+ header
->main_size
;
5552 class data_in
*data_in
;
5556 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5557 header
->main_size
, file_data
);
5559 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5560 header
->string_size
, vNULL
);
5561 count
= streamer_read_uhwi (&ib_main
);
5563 for (i
= 0; i
< count
; i
++)
5566 struct cgraph_node
*node
;
5567 lto_symtab_encoder_t encoder
;
5569 index
= streamer_read_uhwi (&ib_main
);
5570 encoder
= file_data
->symtab_node_encoder
;
5571 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5573 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
5575 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5577 lto_data_in_delete (data_in
);
5580 /* Read IPA-CP aggregate replacements. */
5583 ipcp_read_transformation_summaries (void)
5585 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5586 struct lto_file_decl_data
*file_data
;
5589 while ((file_data
= file_data_vec
[j
++]))
5593 = lto_get_summary_section_data (file_data
, LTO_section_ipcp_transform
,
5596 read_replacements_section (file_data
, data
, len
);
5600 /* Adjust the aggregate replacements in TS to reflect any parameter removals
5601 which might have already taken place. If after adjustments there are no
5602 aggregate replacements left, the m_agg_values will be set to NULL. In other
5603 cases, it may be shrunk. */
5606 adjust_agg_replacement_values (cgraph_node
*node
, ipcp_transformation
*ts
)
5608 clone_info
*cinfo
= clone_info::get (node
);
5609 if (!cinfo
|| !cinfo
->param_adjustments
)
5612 auto_vec
<int, 16> new_indices
;
5613 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5614 bool removed_item
= false;
5615 unsigned dst_index
= 0;
5616 unsigned count
= ts
->m_agg_values
->length ();
5617 for (unsigned i
= 0; i
< count
; i
++)
5619 ipa_argagg_value
*v
= &(*ts
->m_agg_values
)[i
];
5620 gcc_checking_assert (v
->index
>= 0);
5623 if ((unsigned) v
->index
< new_indices
.length ())
5624 new_idx
= new_indices
[v
->index
];
5630 (*ts
->m_agg_values
)[dst_index
] = *v
;
5634 removed_item
= true;
5639 ggc_free (ts
->m_agg_values
);
5640 ts
->m_agg_values
= NULL
;
5642 else if (removed_item
)
5643 ts
->m_agg_values
->truncate (dst_index
);
5648 /* Dominator walker driving the ipcp modification phase. */
5650 class ipcp_modif_dom_walker
: public dom_walker
5653 ipcp_modif_dom_walker (struct ipa_func_body_info
*fbi
,
5654 vec
<ipa_param_descriptor
, va_gc
> *descs
,
5655 ipcp_transformation
*ts
, bool *sc
)
5656 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
5657 m_ts (ts
), m_something_changed (sc
) {}
5659 edge
before_dom_children (basic_block
) final override
;
5661 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup
); }
5664 struct ipa_func_body_info
*m_fbi
;
5665 vec
<ipa_param_descriptor
, va_gc
> *m_descriptors
;
5666 ipcp_transformation
*m_ts
;
5667 bool *m_something_changed
;
5668 auto_bitmap m_need_eh_cleanup
;
5672 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
5674 gimple_stmt_iterator gsi
;
5675 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5677 gimple
*stmt
= gsi_stmt (gsi
);
5679 HOST_WIDE_INT bit_offset
;
5684 if (!gimple_assign_load_p (stmt
))
5686 rhs
= gimple_assign_rhs1 (stmt
);
5687 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5692 while (handled_component_p (t
))
5694 /* V_C_E can do things like convert an array of integers to one
5695 bigger integer and similar things we do not handle below. */
5696 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
5701 t
= TREE_OPERAND (t
, 0);
5706 if (!ipa_load_from_parm_agg (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5707 &bit_offset
, &size
, &by_ref
))
5709 unsigned unit_offset
= bit_offset
/ BITS_PER_UNIT
;
5710 ipa_argagg_value_list
avl (m_ts
);
5711 tree v
= avl
.get_value (index
, unit_offset
, by_ref
);
5714 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
))), size
))
5717 gcc_checking_assert (is_gimple_ip_invariant (v
));
5718 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
)))
5720 if (fold_convertible_p (TREE_TYPE (rhs
), v
))
5721 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
);
5722 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5723 == TYPE_SIZE (TREE_TYPE (v
)))
5724 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
);
5729 fprintf (dump_file
, " const ");
5730 print_generic_expr (dump_file
, v
);
5731 fprintf (dump_file
, " can't be converted to type of ");
5732 print_generic_expr (dump_file
, rhs
);
5733 fprintf (dump_file
, "\n");
5741 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5743 fprintf (dump_file
, "Modifying stmt:\n ");
5744 print_gimple_stmt (dump_file
, stmt
, 0);
5746 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5749 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5751 fprintf (dump_file
, "into:\n ");
5752 print_gimple_stmt (dump_file
, stmt
, 0);
5753 fprintf (dump_file
, "\n");
5756 *m_something_changed
= true;
5757 if (maybe_clean_eh_stmt (stmt
))
5758 bitmap_set_bit (m_need_eh_cleanup
, bb
->index
);
5763 /* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
5764 - whether passed by reference or not is given by BY_REF - return that
5765 constant. Otherwise return NULL_TREE. */
5768 ipcp_get_aggregate_const (struct function
*func
, tree parm
, bool by_ref
,
5769 HOST_WIDE_INT bit_offset
, HOST_WIDE_INT bit_size
)
5771 cgraph_node
*node
= cgraph_node::get (func
->decl
);
5772 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5774 if (!ts
|| !ts
->m_agg_values
)
5777 int index
= ts
->get_param_index (func
->decl
, parm
);
5781 ipa_argagg_value_list
avl (ts
);
5782 unsigned unit_offset
= bit_offset
/ BITS_PER_UNIT
;
5783 tree v
= avl
.get_value (index
, unit_offset
, by_ref
);
5785 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
))), bit_size
))
5791 /* Return true if we have recorded VALUE and MASK about PARM.
5792 Set VALUE and MASk accordingly. */
5795 ipcp_get_parm_bits (tree parm
, tree
*value
, widest_int
*mask
)
5797 cgraph_node
*cnode
= cgraph_node::get (current_function_decl
);
5798 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5799 if (!ts
|| vec_safe_length (ts
->bits
) == 0)
5802 int i
= ts
->get_param_index (current_function_decl
, parm
);
5805 clone_info
*cinfo
= clone_info::get (cnode
);
5806 if (cinfo
&& cinfo
->param_adjustments
)
5808 i
= cinfo
->param_adjustments
->get_original_index (i
);
5813 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5816 *mask
= bits
[i
]->mask
;
5817 *value
= wide_int_to_tree (TREE_TYPE (parm
), bits
[i
]->value
);
5821 /* Update bits info of formal parameters of NODE as described in TS. */
5824 ipcp_update_bits (struct cgraph_node
*node
, ipcp_transformation
*ts
)
5826 if (vec_safe_is_empty (ts
->bits
))
5828 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5829 unsigned count
= bits
.length ();
5833 auto_vec
<int, 16> new_indices
;
5834 bool need_remapping
= false;
5835 clone_info
*cinfo
= clone_info::get (node
);
5836 if (cinfo
&& cinfo
->param_adjustments
)
5838 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5839 need_remapping
= true;
5841 auto_vec
<tree
, 16> parm_decls
;
5842 push_function_arg_decls (&parm_decls
, node
->decl
);
5844 for (unsigned i
= 0; i
< count
; ++i
)
5849 if (i
>= new_indices
.length ())
5851 int idx
= new_indices
[i
];
5854 parm
= parm_decls
[idx
];
5857 parm
= parm_decls
[i
];
5858 gcc_checking_assert (parm
);
5862 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm
))
5863 || POINTER_TYPE_P (TREE_TYPE (parm
)))
5864 || !is_gimple_reg (parm
))
5867 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5873 fprintf (dump_file
, "Adjusting mask for param %u to ", i
);
5874 print_hex (bits
[i
]->mask
, dump_file
);
5875 fprintf (dump_file
, "\n");
5878 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5880 unsigned prec
= TYPE_PRECISION (TREE_TYPE (ddef
));
5881 signop sgn
= TYPE_SIGN (TREE_TYPE (ddef
));
5882 wide_int mask
= wide_int::from (bits
[i
]->mask
, prec
, UNSIGNED
);
5883 wide_int value
= wide_int::from (bits
[i
]->value
, prec
, sgn
);
5884 set_bitmask (ddef
, value
, mask
);
5888 unsigned tem
= bits
[i
]->mask
.to_uhwi ();
5889 unsigned HOST_WIDE_INT bitpos
= bits
[i
]->value
.to_uhwi ();
5890 unsigned align
= tem
& -tem
;
5891 unsigned misalign
= bitpos
& (align
- 1);
5896 fprintf (dump_file
, "Adjusting align: %u, misalign: %u\n", align
, misalign
);
5898 unsigned old_align
, old_misalign
;
5899 struct ptr_info_def
*pi
= get_ptr_info (ddef
);
5900 bool old_known
= get_ptr_info_alignment (pi
, &old_align
, &old_misalign
);
5903 && old_align
> align
)
5907 fprintf (dump_file
, "But alignment was already %u.\n", old_align
);
5908 if ((old_misalign
& (align
- 1)) != misalign
)
5909 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5910 old_misalign
, misalign
);
5916 && ((misalign
& (old_align
- 1)) != old_misalign
)
5918 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5919 old_misalign
, misalign
);
5921 set_ptr_info_alignment (pi
, align
, misalign
);
5927 /* Update value range of formal parameters of NODE as described in TS. */
5930 ipcp_update_vr (struct cgraph_node
*node
, ipcp_transformation
*ts
)
5932 if (vec_safe_is_empty (ts
->m_vr
))
5934 const vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5935 unsigned count
= vr
.length ();
5939 auto_vec
<int, 16> new_indices
;
5940 bool need_remapping
= false;
5941 clone_info
*cinfo
= clone_info::get (node
);
5942 if (cinfo
&& cinfo
->param_adjustments
)
5944 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5945 need_remapping
= true;
5947 auto_vec
<tree
, 16> parm_decls
;
5948 push_function_arg_decls (&parm_decls
, node
->decl
);
5950 for (unsigned i
= 0; i
< count
; ++i
)
5956 if (i
>= new_indices
.length ())
5958 remapped_idx
= new_indices
[i
];
5959 if (remapped_idx
< 0)
5965 parm
= parm_decls
[remapped_idx
];
5967 gcc_checking_assert (parm
);
5968 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5970 if (!ddef
|| !is_gimple_reg (parm
))
5973 if (vr
[i
].known_p ())
5976 vr
[i
].get_vrange (tmp
);
5978 if (!tmp
.undefined_p () && !tmp
.varying_p ())
5982 fprintf (dump_file
, "Setting value range of param %u "
5983 "(now %i) ", i
, remapped_idx
);
5984 tmp
.dump (dump_file
);
5985 fprintf (dump_file
, "]\n");
5987 set_range_info (ddef
, tmp
);
5993 /* IPCP transformation phase doing propagation of aggregate values. */
5996 ipcp_transform_function (struct cgraph_node
*node
)
5998 struct ipa_func_body_info fbi
;
6001 gcc_checking_assert (cfun
);
6002 gcc_checking_assert (current_function_decl
);
6005 fprintf (dump_file
, "Modification phase of node %s\n",
6006 node
->dump_name ());
6008 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
6010 || (vec_safe_is_empty (ts
->m_agg_values
)
6011 && vec_safe_is_empty (ts
->bits
)
6012 && vec_safe_is_empty (ts
->m_vr
)))
6015 ts
->maybe_create_parm_idx_map (cfun
->decl
);
6016 ipcp_update_bits (node
, ts
);
6017 ipcp_update_vr (node
, ts
);
6018 if (vec_safe_is_empty (ts
->m_agg_values
))
6020 param_count
= count_formal_params (node
->decl
);
6021 if (param_count
== 0)
6024 adjust_agg_replacement_values (node
, ts
);
6025 if (vec_safe_is_empty (ts
->m_agg_values
))
6028 fprintf (dump_file
, " All affected aggregate parameters were either "
6029 "removed or converted into scalars, phase done.\n");
6034 fprintf (dump_file
, " Aggregate replacements:");
6035 ipa_argagg_value_list
avs (ts
);
6036 avs
.dump (dump_file
);
6041 fbi
.bb_infos
= vNULL
;
6042 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
6043 fbi
.param_count
= param_count
;
6044 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
6046 vec
<ipa_param_descriptor
, va_gc
> *descriptors
= NULL
;
6047 vec_safe_grow_cleared (descriptors
, param_count
, true);
6048 ipa_populate_param_decls (node
, *descriptors
);
6049 bool modified_mem_access
= false;
6050 calculate_dominance_info (CDI_DOMINATORS
);
6051 ipcp_modif_dom_walker
walker (&fbi
, descriptors
, ts
, &modified_mem_access
);
6052 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6053 free_dominance_info (CDI_DOMINATORS
);
6054 bool cfg_changed
= walker
.cleanup_eh ();
6057 struct ipa_bb_info
*bi
;
6058 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
6059 free_ipa_bb_info (bi
);
6060 fbi
.bb_infos
.release ();
6062 vec_free (descriptors
);
6064 delete_unreachable_blocks_update_callgraph (node
, false);
6066 return modified_mem_access
? TODO_update_ssa_only_virtuals
: 0;
6070 #include "gt-ipa-prop.h"