/* Interprocedural analyses.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "symtab-clones.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "value-range-storage.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing ranges.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <ipa_vr *>
{
  typedef ipa_vr *value_type;
  typedef const vrange *compare_type;
  static hashval_t
  hash (const ipa_vr *p)
    {
      // This never gets called, except in the verification code, as
      // ipa_get_value_range() calculates the hash itself.  This
      // function is mostly here for completeness' sake.
      Value_Range vr;
      p->get_vrange (vr);
      inchash::hash hstate;
      add_vrange (vr, hstate);
      return hstate.end ();
    }
  static bool
  equal (const ipa_vr *a, const vrange *b)
    {
      return a->equal_p (*b);
    }
  static const bool empty_zero_p = true;
  static void
  mark_empty (ipa_vr *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_vr *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_vr *p)
    {
      return p == reinterpret_cast<const ipa_vr *> (1);
    }
  static void
  mark_deleted (ipa_vr *&p)
    {
      p = reinterpret_cast<ipa_vr *> (1);
    }
};
/* Hash table to avoid repeated allocations of equal ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
ipa_vr::ipa_vr (const vrange &r)
  : m_storage (ggc_alloc_vrange_storage (r)),
    m_type (r.type ())
{
}

bool
ipa_vr::equal_p (const vrange &r) const
{
  gcc_checking_assert (!r.undefined_p ());
  return (types_compatible_p (m_type, r.type ()) && m_storage->equal_p (r));
}

void
ipa_vr::get_vrange (Value_Range &r) const
{
  m_storage->get_vrange (r, m_type);
}

void
ipa_vr::set_unknown ()
{
  if (m_storage)
    ggc_free (m_storage);

  m_storage = NULL;
}
void
ipa_vr::streamer_read (lto_input_block *ib, data_in *data_in)
{
  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool known = bp_unpack_value (&bp, 1);
  if (known)
    {
      Value_Range vr;
      streamer_read_value_range (ib, data_in, vr);
      if (!m_storage || !m_storage->fits_p (vr))
        {
          if (m_storage)
            ggc_free (m_storage);
          m_storage = ggc_alloc_vrange_storage (vr);
        }
      m_storage->set_vrange (vr);
      m_type = vr.type ();
    }
  else
    {
      m_storage = NULL;
      m_type = NULL;
    }
}
void
ipa_vr::streamer_write (output_block *ob) const
{
  struct bitpack_d bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!m_storage, 1);
  streamer_write_bitpack (&bp);
  if (m_storage)
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      streamer_write_vrange (ob, vr);
    }
}
void
ipa_vr::dump (FILE *out) const
{
  if (known_p ())
    {
      Value_Range vr (m_type);
      m_storage->get_vrange (vr, m_type);
      vr.dump (out);
    }
  else
    fprintf (out, "NO RANGE");
}
// These stubs are because we use an ipa_vr in a hash_traits and
// hash-traits.h defines an extern of gt_ggc_mx (T &) instead of
// picking up the gt_ggc_mx (T *) version.
void
gt_pch_nx (ipa_vr *&x)
{
  return gt_pch_nx ((ipa_vr *) x);
}

void
gt_ggc_mx (ipa_vr *&x)
{
  return gt_ggc_mx ((ipa_vr *) x);
}
/* Analysis summary of function call return value.  */
struct GTY(()) ipa_return_value_summary
{
  /* Known value range.
     This needs to be wrapped in a structure due to the specific way
     we allocate ipa_vr.  */
  ipa_vr *vr;
};

/* Function summary for return values.  */
class ipa_return_value_sum_t : public function_summary <ipa_return_value_summary *>
{
public:
  ipa_return_value_sum_t (symbol_table *table, bool ggc):
    function_summary <ipa_return_value_summary *> (table, ggc) { }

  /* Hook that is called by summary when a node is duplicated.  */
  void duplicate (cgraph_node *,
                  cgraph_node *,
                  ipa_return_value_summary *data,
                  ipa_return_value_summary *data2) final override
  {
    *data2 = *data;
  }
};

/* Variable holding the return value summary.  */
static GTY(()) function_summary <ipa_return_value_summary *> *ipa_return_value_sum;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
                            tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print VAL which is extracted from a jump function to F.  */

static void
ipa_print_constant_value (FILE *f, tree val)
{
  print_generic_expr (f, val);

  /* This is in keeping with values_equal_for_ipcp_p.  */
  if (TREE_CODE (val) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL
          || (TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
              && DECL_IN_CONSTANT_POOL (TREE_OPERAND (val, 0)))))
    {
      fprintf (f, " -> ");
      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          fprintf (f, "CONST: ");
          ipa_print_constant_value (f, jump_func->value.constant.value);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name(jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f, jump_func->value.pass_through.operand);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.refdesc_decremented)
            fprintf (f, ", refdesc_decremented");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.keep_null)
            fprintf (f, ", keep_null");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              fprintf (f, "type: ");
              print_generic_expr (f, item->type);
              fprintf (f, ", ");
              if (item->jftype == IPA_JF_PASS_THROUGH)
                fprintf (f, "PASS THROUGH: %d,",
                         item->value.pass_through.formal_id);
              else if (item->jftype == IPA_JF_LOAD_AGG)
                {
                  fprintf (f, "LOAD AGG: %d",
                           item->value.pass_through.formal_id);
                  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
                           item->value.load_agg.offset,
                           item->value.load_agg.by_ref ? "reference"
                                                       : "value");
                }

              if (item->jftype == IPA_JF_PASS_THROUGH
                  || item->jftype == IPA_JF_LOAD_AGG)
                {
                  fprintf (f, " op %s",
                           get_tree_code_name (item->value.pass_through.operation));
                  if (item->value.pass_through.operation != NOP_EXPR)
                    {
                      fprintf (f, " ");
                      print_generic_expr (f, item->value.pass_through.operand);
                    }
                }
              else if (item->jftype == IPA_JF_CONST)
                {
                  fprintf (f, "CONST: ");
                  ipa_print_constant_value (f, item->value.constant);
                }
              else if (item->jftype == IPA_JF_UNKNOWN)
                fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->type)));
              fprintf (f, "\n");
            }
        }

      class ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, "         Context: ");
          ctx->dump (dump_file);
        }

      if (jump_func->m_vr)
        {
          jump_func->m_vr->dump (f);
          fprintf (f, "\n");
        }
      else
        fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
               node->dump_name (),
               cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        fprintf (f, "       no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        fprintf (f, "       no arg info\n");
      else
        ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}
/* Set JFUNC to be a jump function recording that we know nothing about the
   actual argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
          || (VAR_P (TREE_OPERAND (constant, 0))
              && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.refdesc_decremented = false;
}
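
/* For illustration (a hypothetical caller, not code from this file): in

     void caller (int a)
     {
       callee (a);
     }

   the argument of the call is described by a simple pass-through jump
   function with formal_id 0 and operation NOP_EXPR, i.e. callee receives
   caller's first formal parameter unchanged.  */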
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.refdesc_decremented = false;
}
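
/* For illustration (a hypothetical caller, not code from this file): in

     void caller (int a)
     {
       callee (a + 4);
     }

   the argument is described by an arithmetic pass-through jump function
   with formal_id 0, operation PLUS_EXPR and operand 4, so IPA-CP can
   compute the argument's value whenever it knows the value of A.  */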
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}
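
/* For illustration (a hypothetical caller, not code from this file): in

     void B::foo ()
     {
       bar (&this->member_at_byte_offset_8);
     }

   the argument is described by an ancestor jump function with formal_id 0
   and offset 64 (offsets are measured in bits), i.e. the passed pointer is
   THIS adjusted by a known constant.  */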
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_ref_base_and_extent to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
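
/* To illustrate the three constructor sections described above on a
   hypothetical example (not code from this file):

     struct B : A
     {
       B () : A ()          // 1) constructors of ancestor sub-objects
       {
         // 2) stores of the VMT pointers of B and its ancestors happen here
         m = compute ();    // 3) user code; may call virtual functions
       }
       int m;
     };

   When walking backwards from a statement in section 3, any call that is
   encountered cannot be an ancestor constructor or destructor, which is why
   the walk may safely ignore call statements.  */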
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (inlined_polymorphic_ctor_dtor_block_p (block, false))
              return true;
          return false;
        }
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
                                       tree base, tree comp_type, gcall *call,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                          &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
                    tree comp_type, gcall *call,
                    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
                                                call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
                        gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
                                                call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
                              gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
                            vec<ipa_param_descriptor, va_gc> *descriptors,
                            gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
                           int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
                              gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                              gimple_bb (call),
                                                              index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL, NULL,
                                   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.

   Return false if the offset divided by BITS_PER_UNIT would not fit into an
   unsigned int.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
                        vec<ipa_param_descriptor, va_gc> *descriptors,
                        gimple *stmt, tree op, int *index_p,
                        HOST_WIDE_INT *offset_p, poly_int64 *size_p,
                        bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base
      || (*offset_p / BITS_PER_UNIT) > UINT_MAX)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          if (guaranteed_unmodified)
            *guaranteed_unmodified = true;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;

           D.1867_2 = p.1_1->f;
           ...
         */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
        return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      if (guaranteed_unmodified)
        *guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
                                   class ipa_node_params *info,
                                   gimple *stmt,
                                   HOST_WIDE_INT *offset_p,
                                   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
                               offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
                         size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
        return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find original pointer.  Initialize RET to the pointer which results from
   the walk.
   If offset is known return true and initialize OFFSET_RET.  */

static bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
        {
          poly_int64 extra_offset = 0;
          tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
                                                     &extra_offset);
          if (!base)
            {
              base = get_base_address (TREE_OPERAND (op, 0));
              if (TREE_CODE (base) != MEM_REF)
                break;
              offset_known = false;
            }
          else
            {
              if (TREE_CODE (base) != MEM_REF)
                break;
              offset += extra_offset;
            }
          op = TREE_OPERAND (base, 0);
          if (mem_ref_offset (base).to_shwi (&extra_offset))
            offset += extra_offset;
          else
            offset_known = false;
        }
      else if (TREE_CODE (op) == SSA_NAME
               && !SSA_NAME_IS_DEFAULT_DEF (op))
        {
          gimple *pstmt = SSA_NAME_DEF_STMT (op);

          if (gimple_assign_single_p (pstmt))
            op = gimple_assign_rhs1 (pstmt);
          else if (is_gimple_assign (pstmt)
                   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
            {
              poly_int64 extra_offset = 0;
              if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
                                  &extra_offset))
                offset += extra_offset;
              else
                offset_known = false;
              op = gimple_assign_rhs1 (pstmt);
            }
          else
            break;
        }
      else
        break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
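
/* For illustration (hypothetical GIMPLE, not code from this file): starting
   from the argument of the call in

     p.1_2 = p_1(D) + 4;
     q_3 = p.1_2 + 8;
     use (q_3);

   the walk follows the POINTER_PLUS_EXPR definitions back to the default
   definition p_1(D), stores it into *RET and accumulates the unit offset
   12 into *OFFSET_RET.  */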
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as accessed in
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
                                  class ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple *stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
        {
        case GIMPLE_BINARY_RHS:
          {
            tree op2 = gimple_assign_rhs2 (stmt);
            if (!is_gimple_ip_invariant (op2)
                || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
                     != tcc_comparison)
                    && !useless_type_conversion_p (TREE_TYPE (name),
                                                   TREE_TYPE (op1))))
              return;

            ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                           gimple_assign_rhs_code (stmt));
            break;
          }
        case GIMPLE_SINGLE_RHS:
          {
            bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
                                                       tc_ssa);
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
            break;
          }
        case GIMPLE_UNARY_RHS:
          if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
            ipa_set_jf_unary_pass_through (jfunc, index,
                                           gimple_assign_rhs_code (stmt));
        default:;
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa),
                         false);
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
                                    class ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (cond_bb));
  if (!cond
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm),
                       true);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
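
/* For illustration (a hypothetical caller, not code from this file): before
   the call in

     s.f1 = 1;
     s.f2 = param_3(D);
     foo (&s);

   the list would contain two nodes sorted by offset: one describing f1 as
   the constant 1 and one describing f2 as a simple pass-through of the
   caller's formal parameter.  */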
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted by increasing offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
                          struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
        break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
                                struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
        return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
        return true;
    }

  return false;
}
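
/* Worked example of the overlap test above: if LIST already describes a
   store to bits [0, 32) and ITEM describes bits [16, 48), the two overlap
   and the function returns true, whereas an ITEM covering bits [32, 64)
   would not be clobbered.  */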
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int value_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
        {
          /* Content value is derived from some formal parameter.  */
          if (list->value.offset >= 0)
            item.jftype = IPA_JF_LOAD_AGG;
          else
            item.jftype = IPA_JF_PASS_THROUGH;

          item.value.load_agg = list->value;
          if (operand)
            item.value.pass_through.operand
              = unshare_expr_without_location (operand);
        }
      else if (operand)
        {
          /* Content value is known constant.  */
          item.jftype = IPA_JF_CONST;
          item.value.constant = unshare_expr_without_location (operand);
        }
      else
        continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */
static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
                           struct ipa_load_agg_data *agg_value,
                           gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
        break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
        break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast <gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

           _12 = *a_11(D).dim[0].stride;
           ...
           # iftmp.22_9 = PHI <_12(2), 1(3)>
           ...
           parm.6.dim[0].stride = iftmp.22_9;
           ...
           __x_MOD_foo (&parm.6, b_31(D));

         The aggregate function describing parm.6.dim[0].stride is encoded as
         a PASS-THROUGH jump function with ASSERT_EXPR operation with operand
         1 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
        return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
        {
          operand = arg1;
          rhs1 = arg0;
        }
      else if (is_gimple_ip_invariant (arg0))
        {
          operand = arg0;
          rhs1 = arg1;
        }
      else
        return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
        return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
        {
        case GIMPLE_SINGLE_RHS:
          if (is_gimple_ip_invariant (rhs1))
            {
              agg_value->pass_through.operand = rhs1;
              return;
            }
          code = NOP_EXPR;
          break;

        case GIMPLE_UNARY_RHS:
          /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
             (truth_not_expr is an example), GIMPLE_BINARY_RHS does not imply
             tcc_binary, this subtlety is somewhat misleading.

             Since tcc_unary is widely used in IPA-CP code to check an
             operation with one operand, here we only allow tcc_unary
             operations to avoid possible problems.  Then we can use
             (opclass == tcc_unary) or not to distinguish unary and binary.  */
          if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
            return;

          rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
          break;

        case GIMPLE_BINARY_RHS:
          {
            gimple *rhs1_stmt = stmt;
            gimple *rhs2_stmt = stmt;
            tree rhs2 = gimple_assign_rhs2 (stmt);

            rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
            rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

            if (is_gimple_ip_invariant (rhs2))
              {
                agg_value->pass_through.operand = rhs2;
                stmt = rhs1_stmt;
              }
            else if (is_gimple_ip_invariant (rhs1))
              {
                if (TREE_CODE_CLASS (code) == tcc_comparison)
                  code = swap_tree_comparison (code);
                else if (!commutative_tree_code (code))
                  return;

                agg_value->pass_through.operand = rhs1;
                stmt = rhs2_stmt;
                rhs1 = rhs2;
              }
            else
              return;

            if (TREE_CODE_CLASS (code) != tcc_comparison
                && !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (rhs1)))
              return;

            break;
          }

        default:
          return;
        }
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
                                               &agg_value->offset,
                                               &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
        agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
                     gimple *stmt, tree base, bool check_ref,
                     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
                                               &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
          || TREE_OPERAND (lhs_base, 0) != base
          || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
        return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content)
	      /* Since IPA-CP stores results with unsigned int offsets, we can
		 discard those which would not fit now before we stream them to
		 WPA.  */
	      && (content->offset + content->size - arg_offset
		  <= (HOST_WIDE_INT) UINT_MAX * BITS_PER_UNIT))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}

      dom_vuse = gimple_vuse (stmt);
    }

  BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
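/* An illustrative sketch (hypothetical caller code, not from GCC itself):

     struct S { int a; void *b; };

     void caller (void *p)
     {
       struct S s;
       s.a = 1;     <- constant store, would become an IPA_JF_CONST agg item
       s.b = p;     <- store of formal P, would become a pass-through item
       callee (&s);
     }

   Assuming neither store is clobbered by a later dominating virtual
   definition, the second stage above would record both stores and the third
   stage would emit them as items of JFUNC->agg.  */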
/* Return the Ith param type of callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t && t != void_list_node)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
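/* Illustration (a sketch, not additional semantics): for a callee declared
   as int callee (int, long), ipa_get_callee_param_type (e, 1) walks
   TYPE_ARG_TYPES and returns the tree for long; only when the prototype
   does not cover index I (e.g. old-style or variadic declarations) does
   the lookup fall back to DECL_ARGUMENTS as above.  */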
/* Return a pointer to an ipa_vr just like TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  */

static ipa_vr *
ipa_get_value_range (const vrange &tmp)
{
  inchash::hash hstate;
  inchash::add_vrange (tmp, hstate);
  hashval_t hash = hstate.end ();
  ipa_vr **slot = ipa_vr_hash_table->find_slot_with_hash (&tmp, hash, INSERT);
  if (*slot)
    return *slot;

  ipa_vr *vr = new (ggc_alloc<ipa_vr> ()) ipa_vr (tmp);
  *slot = vr;
  return vr;
}
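/* Note: value ranges attached to jump functions are hash-consed through
   ipa_vr_hash_table, so jump functions with equal ranges share a single
   GC-allocated ipa_vr.  A usage sketch (illustrative only; in this file
   all lookups go through ipa_set_jfunc_vr below):

     Value_Range vr (TREE_TYPE (arg));
     if (get_range_query (cfun)->range_of_expr (vr, arg, call))
       ipa_vr *shared = ipa_get_value_range (vr);   <- reused if seen before
*/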
/* Assign to JF a pointer to a range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const vrange &tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, const ipa_vr &vr)
{
  Value_Range tmp;
  vr.get_vrange (tmp);
  ipa_set_jfunc_vr (jf, tmp);
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      Value_Range vr (TREE_TYPE (arg));
      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    vr.set_nonzero (TREE_TYPE (arg));

	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align, prec = TYPE_PRECISION (TREE_TYPE (arg));

	  get_pointer_alignment_1 (arg, &align, &bitpos);

	  if (align > BITS_PER_UNIT
	      && opt_for_fn (cs->caller->decl, flag_ipa_bit_cp))
	    {
	      wide_int mask
		= wi::bit_and_not (wi::mask (prec, false, prec),
				   wide_int::from (align / BITS_PER_UNIT - 1,
						   prec, UNSIGNED));
	      wide_int value = wide_int::from (bitpos / BITS_PER_UNIT, prec,
					       UNSIGNED);
	      irange_bitmask bm (value, mask);
	      if (!addr_nonzero)
		vr.set_varying (TREE_TYPE (arg));
	      irange &r = as_a <irange> (vr);
	      r.update_bitmask (bm);
	      ipa_set_jfunc_vr (jfunc, vr);
	    }
	  else if (addr_nonzero)
	    ipa_set_jfunc_vr (jfunc, vr);
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (param_type
	      && Value_Range::supports_type_p (TREE_TYPE (arg))
	      && Value_Range::supports_type_p (param_type)
	      && irange::supports_p (TREE_TYPE (arg))
	      && irange::supports_p (param_type)
	      && get_range_query (cfun)->range_of_expr (vr, arg, cs->call_stmt)
	      && !vr.undefined_p ())
	    {
	      Value_Range resvr (vr);
	      range_cast (resvr, param_type);
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
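/* As an illustration (a sketch, not from the sources): for a call

     foo (5, x_3, &global_ro_var);

   the loop above would produce an IPA_JF_CONST jump function for the first
   argument, a pass-through or more complex jump function for x_3 depending
   on its defining statement, and a constant jump function, possibly with
   known aggregate parts, for the address of a read-only global.  */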
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ())
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
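/* Example of statements the matcher above accepts (a sketch):

     D.1234_5 = f.__pfn;		<- COMPONENT_REF form
   or
     D.1234_5 = MEM[(struct  *)&f];	<- MEM_REF form with matching offset

   where F is a PARM_DECL whose type looks like a member pointer; with
   USE_DELTA the __delta field is looked for instead of __pfn.  */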
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic
   call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or

     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gcond *branch = safe_dyn_cast <gcond *> (*gsi_last_bb (bb));
  if (!branch)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Return true if EXPR is a load from a dereference of SSA_NAME NAME.  */

static bool
load_from_dereferenced_name (tree expr, tree name)
{
  tree base = get_base_address (expr);
  return (TREE_CODE (base) == MEM_REF
	  && TREE_OPERAND (base, 0) == name);
}
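/* For example, for a parameter with default definition p_1(D), an expression
   such as MEM[(int *)p_1] (i.e. *p_1) is a load from a dereference of p_1,
   whereas p_1 itself or MEM[q_2] is not.  (Illustration only.)  */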
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      gimple *stmt;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
		{
		  if (is_gimple_debug (stmt))
		    continue;

		  int all_stmt_uses = 0;
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
		    all_stmt_uses++;

		  if (is_gimple_call (stmt))
		    {
		      if (gimple_call_internal_p (stmt))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      int recognized_stmt_uses;
		      if (gimple_call_fn (stmt) == ddef)
			recognized_stmt_uses = 1;
		      else
			recognized_stmt_uses = 0;
		      unsigned arg_count = gimple_call_num_args (stmt);
		      for (unsigned i = 0; i < arg_count; i++)
			{
			  tree arg = gimple_call_arg (stmt, i);
			  if (arg == ddef)
			    recognized_stmt_uses++;
			  else if (load_from_dereferenced_name (arg, ddef))
			    {
			      load_dereferenced = true;
			      recognized_stmt_uses++;
			    }
			}

		      if (recognized_stmt_uses != all_stmt_uses)
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      if (call_uses >= 0)
			call_uses += all_stmt_uses;
		    }
		  else if (gimple_assign_single_p (stmt))
		    {
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (all_stmt_uses != 1
			  || !load_from_dereferenced_name (rhs, ddef))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      load_dereferenced = true;
		    }
		  else
		    {
		      call_uses = IPA_UNDESCRIBED_USE;
		      break;
		    }
		}
	    }
	  else
	    call_uses = 0;
	}
      else
	call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}
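/* Counting sketch (hypothetical input, not from the sources): for

     void caller (void (*fn) (int))
     {
       fn (0);
       fn (0);
     }

   both uses of FN's default definition are calls through FN, so each call
   contributes one recognized use and the controlled-uses count ends up 2,
   with load_dereferenced remaining false.  Any use that is not a call, a
   passed argument or a recognized dereferenced load degrades the count to
   IPA_UNDESCRIBED_USE.  */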
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node)
      || (count_formal_params (node->decl)
	  >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS)))
    {
      gcc_assert (!ipa_get_param_count (info));
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  enable_ranger (cfun, false);
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  disable_ranger (cfun);

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
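/* A minimal driver sketch (hypothetical; the real driver lives in the IPA
   pass machinery, this only illustrates the intended calling convention):

     cgraph_node *node;
     FOR_EACH_DEFINED_FUNCTION (node)
       if (node->has_gimple_body_p ())
	 ipa_analyze_node (node);
*/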
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
	    {
	      int dst_fid;
	      struct ipa_jump_func *src;

	      if (item->jftype != IPA_JF_PASS_THROUGH
		  && item->jftype != IPA_JF_LOAD_AGG)
		continue;

	      dst_fid = item->value.pass_through.formal_id;
	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
		{
		  item->jftype = IPA_JF_UNKNOWN;
		  continue;
		}

	      item->value.pass_through.formal_id = -1;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      if (src->type == IPA_JF_CONST)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      && item->value.pass_through.operation == NOP_EXPR)
		    {
		      item->jftype = IPA_JF_CONST;
		      item->value.constant = src->value.constant.value;
		      continue;
		    }
		}
	      else if (src->type == IPA_JF_PASS_THROUGH
		       && src->value.pass_through.operation == NOP_EXPR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      || !item->value.load_agg.by_ref
		      || src->value.pass_through.agg_preserved)
		    item->value.pass_through.formal_id
		      = src->value.pass_through.formal_id;
		}
	      else if (src->type == IPA_JF_ANCESTOR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH)
		    {
		      if (!src->value.ancestor.offset)
			item->value.pass_through.formal_id
			  = src->value.ancestor.formal_id;
		    }
		  else if (src->value.ancestor.agg_preserved)
		    {
		      gcc_checking_assert (item->value.load_agg.by_ref);

		      item->value.pass_through.formal_id
			= src->value.ancestor.formal_id;
		      item->value.load_agg.offset
			+= src->value.ancestor.offset;
		    }
		}

	      if (item->value.pass_through.formal_id < 0)
		item->jftype = IPA_JF_UNKNOWN;
	    }
	}

      if (!top)
	{
	  ipa_set_jf_unknown (dst);
	  continue;
	}

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  class ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      class ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count, true);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Parameter and argument in ancestor jump function must be pointer
	     type, which means access to aggregate must be by-reference.  */
	  gcc_assert (!src->agg.items || src->agg.by_ref);

	  if (src->agg.items && dst->value.ancestor.agg_preserved)
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (top && dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      class ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  class ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count, true);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  {
		    bool rd
		      = ipa_get_jf_pass_through_refdesc_decremented (dst);
		    ipa_set_jf_cst_copy (dst, src);
		    if (rd)
		      ipa_zap_jf_refdesc (dst);
		  }
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id,
						     operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p,
					 ipa_get_jf_ancestor_keep_null (src));
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
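/* Composition sketch (illustrative, not from the sources): if A calls B
   passing A's parameter 0 straight through, and B in turn calls C passing
   that same value unchanged, then after inlining B into A the loop above
   rewrites C's jump function so that it refers directly to A's parameter 0.
   Constants known at the outer edge are substituted outright, and anything
   that cannot be re-expressed in terms of the surviving caller is demoted
   to IPA_JF_UNKNOWN.  */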
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_unreachable ();
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
	{
	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
	  if (!e2)
	    fprintf (dump_file, "ipa-prop: Discovered call to a "
		     "speculative target (%s -> %s) but the call is "
		     "already speculated to different target.  "
		     "Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	  else
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) that agrees with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor,
				     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ???  Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }

  return NULL;
}
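/* Lookup sketch (hypothetical input): given

     static const struct { int a; int b; } cst = { 7, 9 };

   a request for REQ_OFFSET equal to the bit position of field B descends
   through the CONSTRUCTOR elements above and returns the INTEGER_CST 9;
   bit-fields and element offsets past REQ_OFFSET make the search fail.  */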
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
   is none.  BY_REF specifies whether the value has to be passed by reference
   or by value.  */

static tree
ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function *agg_jfunc,
				   ipa_node_params *src_info,
				   cgraph_node *src_node,
				   HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref != agg_jfunc->by_ref)
    return NULL_TREE;

  for (const ipa_agg_jf_item &item : agg_jfunc->items)
    if (item.offset == offset)
      return ipa_agg_value_from_jfunc (src_info, src_node, &item);

  return NULL_TREE;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol,
			    struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid,
					   IPA_REF_ADDR);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static symtab_node *
symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
	  && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
    return NULL;

  return symtab_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target = NULL_TREE;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      if (scalar)
	target = ipa_find_agg_cst_from_init (scalar, ie->indirect_info->offset,
					     ie->indirect_info->by_ref);
      if (!target && ie->indirect_info->guaranteed_unmodified)
	target = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
						    new_root,
						    ie->indirect_info->offset,
						    ie->indirect_info->by_ref);
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_unreachable ();
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = NULL_TREE;
      if (jfunc->type == IPA_JF_CONST)
	t = ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc),
					ie->indirect_info->offset, true);
      if (!t)
	t = ipa_find_agg_cst_from_jfunc_items (&jfunc->agg, new_root_info,
					       new_root,
					       ie->indirect_info->offset,
					       true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE,
					BUILT_IN_UNREACHABLE_TRAP)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info
    = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of
	 arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
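/* For instance, combine_controlled_uses_counters (2, 3) yields 4: the call
   site being inlined accounts for one use in each of the two counts, so one
   duplicate is subtracted; any IPA_UNDESCRIBED_USE operand poisons the
   result.  */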
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH
	  && !ipa_get_jf_pass_through_refdesc_decremented (jf))
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0,
						      IPA_REF_ADDR)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && VAR_P (TREE_OPERAND (cst, 0)))
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || VAR_P (TREE_OPERAND (cst, 0))));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
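/* To make the combining arithmetic above concrete: combine_controlled_uses_counters
   (defined earlier in this file) returns IPA_UNDESCRIBED_USE if either counter
   is undescribed and C + D - 1 otherwise.  As an illustrative sketch, if the
   new root so far knew of 2 controlled uses of the parameter and the inlined
   callee described 1 use of its own, the combined count is 2 + 1 - 1 = 2, the
   minus one accounting for the call argument that disappeared together with
   the inlined call itself.  */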
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocate it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      vec<ipa_argagg_value, va_gc> *aggs)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->m_agg_values = aggs;
}
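/* Illustrative sketch only, not code used elsewhere in GCC: a caller could
   record that the first four bytes of the aggregate passed in a hypothetical
   NODE's parameter 0 are known to be the integer constant 7 roughly like
   this:

     vec<ipa_argagg_value, va_gc> *aggs = NULL;
     ipa_argagg_value av;
     av.value = build_int_cst (integer_type_node, 7);
     av.unit_offset = 0;
     av.index = 0;
     av.by_ref = false;
     av.killed = false;
     vec_safe_push (aggs, av);
     ipa_set_node_agg_value_chain (node, aggs);
*/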
/* Hook that is called by cgraph.cc when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Creation of a speculative edge.  If the source edge is the one
		 grabbing a reference, we must create a new (duplicate)
		 reference description.  Otherwise they refer to the same
		 description corresponding to a reference taken in a function
		 src->caller is inlined to.  In that case we just must
		 increment the refcount.  */
	      if (src_rdesc->cs == src)
		{
		  symtab_node *n = symtab_node_for_jfunc (src_jf);
		  gcc_checking_assert (n);
		  ipa_ref *ref
		    = src->caller->find_reference (n, src->call_stmt,
						   src->lto_stmt_uid,
						   IPA_REF_ADDR);
		  gcc_checking_assert (ref);
		  dst->caller->clone_reference (ref, ref->stmt);

		  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
		  dst_rdesc->cs = dst;
		  dst_rdesc->refcount = src_rdesc->refcount;
		  dst_rdesc->next_duplicate = NULL;
		  dst_jf->value.constant.rdesc = dst_rdesc;
		}
	      else
		{
		  src_rdesc->refcount++;
		  dst_jf->value.constant.rdesc = src_rdesc;
		}
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
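/* To illustrate the bookkeeping above: reference descriptions of duplicates
   of one original constant jump function are kept on a singly linked list
   threaded through the next_duplicate fields, e.g. after two non-speculative
   clonings of the original edge E0:

     rdesc (cs == E0) -> rdesc (cs == E2) -> rdesc (cs == E1)

   (a new duplicate is pushed right after the source description), which is
   the list the lookup loop in the last IPA_JF_CONST branch traverses to find
   the description belonging to a particular tree of inline clones.  */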
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *, cgraph_node *,
			      ipa_node_params *old_info,
			      ipa_node_params *new_info)
{
  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;
}
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate (cgraph_node *, cgraph_node *dst,
				  ipcp_transformation *src_trans,
				  ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->m_agg_values = vec_safe_copy (src_trans->m_agg_values);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder
    = symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they have been registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i %s", c,
		 ipa_get_param_load_dereferenced (info, i)
		 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  if (jump_func->m_vr)
    jump_func->m_vr->streamer_write (ob);
  else
    {
      bp_pack_value (&bp, false, 1);
      streamer_write_bitpack (&bp);
    }
}
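/* A worked example of the tag encoding used above: jump function types are
   streamed as TYPE * 2 + FLAG.  Assuming the usual enum layout in which
   IPA_JF_CONST is 1, a constant jump function whose value is an ADDR_EXPR is
   therefore written as the single uhwi 3.  ipa_read_jump_function below
   recovers the flag as VAL & 1 and the type as VAL / 2, and rebuilds the
   ADDR_EXPR around the streamed operand when the flag is set.  */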
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool keep_null = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
			     keep_null);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  ipa_vr vr;
  vr.streamer_read (ib, data_in);
  if (vr.known_p ())
    {
      if (prevails)
	ipa_set_jfunc_vr (jump_func, vr);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case.  */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
	? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
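/* For orientation, the per-node record produced above is laid out as:

     node reference
     parameter count
     one move cost per parameter
     bitpack of (load_dereferenced, used) bits per parameter
     controlled-uses count and type per parameter
     for each callee edge: jump functions (and contexts, if computed)
     for each indirect edge: jump functions followed by indirect-call info

   ipa_read_node_info below consumes the fields in exactly this order.  */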
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
	  /* Also stream in jump functions to builtins in hope that they
	     will get fnspecs.  */
	  || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	{
	  ipa_set_param_load_dereferenced (info, k, load_dereferenced);
	  ipa_set_param_used (info, k, used);
	}
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_node_params_sum->get (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* Return true if the IPA-CP transformation summary TS is non-NULL and contains
   useful information.  */

static bool
useful_ipcp_transformation_info_p (ipcp_transformation *ts)
{
  if (!ts)
    return false;
  if (!vec_safe_is_empty (ts->m_agg_values)
      || !vec_safe_is_empty (ts->m_vr))
    return true;
  return false;
}
/* Write into OB IPA-CP transformation summary TS describing NODE.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node,
				ipcp_transformation *ts)
{
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  int node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, vec_safe_length (ts->m_agg_values));
  for (const ipa_argagg_value &av : ts->m_agg_values)
    {
      struct bitpack_d bp;

      stream_write_tree (ob, av.value, true);
      streamer_write_uhwi (ob, av.unit_offset);
      streamer_write_uhwi (ob, av.index);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av.by_ref, 1);
      bp_pack_value (&bp, av.killed, 1);
      streamer_write_bitpack (&bp);
    }

  streamer_write_uhwi (ob, vec_safe_length (ts->m_vr));
  for (const ipa_vr &parm_vr : ts->m_vr)
    parm_vr.streamer_write (ob);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  unsigned int count, i;
  ipcp_transformation_initialize ();
  ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_agg_values, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_argagg_value *av = &(*ts->m_agg_values)[i];

	  av->value = stream_read_tree (ib, data_in);
	  av->unit_offset = streamer_read_uhwi (ib);
	  av->index = streamer_read_uhwi (ib);

	  bitpack_d bp = streamer_read_bitpack (ib);
	  av->by_ref = bp_unpack_value (&bp, 1);
	  av->killed = bp_unpack_value (&bp, 1);
	}
    }

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  parm_vr->streamer_read (ib, data_in);
	}
    }
}
/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast<cgraph_node *> (snode);
      if (!cnode)
	continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
	  && lto_symtab_encoder_encode_body_p (encoder, cnode))
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast<cgraph_node *> (snode);
      if (!cnode)
	continue;
      ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
      if (useful_ipcp_transformation_info_p (ts)
	  && lto_symtab_encoder_encode_body_p (encoder, cnode))
	write_ipcp_transformation_info (ob, cnode, ts);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
					&len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in TS to reflect any parameter removals
   which might have already taken place.  If after adjustments there are no
   aggregate replacements left, the m_agg_values will be set to NULL.  In other
   cases, it may be shrunk.  */

static void
adjust_agg_replacement_values (cgraph_node *node, ipcp_transformation *ts)
{
  clone_info *cinfo = clone_info::get (node);
  if (!cinfo || !cinfo->param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  cinfo->param_adjustments->get_updated_indices (&new_indices);
  bool removed_item = false;
  unsigned dst_index = 0;
  unsigned count = ts->m_agg_values->length ();
  for (unsigned i = 0; i < count; i++)
    {
      ipa_argagg_value *v = &(*ts->m_agg_values)[i];
      gcc_checking_assert (v->index >= 0);

      int new_idx = -1;
      if ((unsigned) v->index < new_indices.length ())
	new_idx = new_indices[v->index];

      if (new_idx >= 0)
	{
	  v->index = new_idx;
	  if (removed_item)
	    (*ts->m_agg_values)[dst_index] = *v;
	  dst_index++;
	}
      else
	removed_item = true;
    }

  if (dst_index == 0)
    {
      ggc_free (ts->m_agg_values);
      ts->m_agg_values = NULL;
    }
  else if (removed_item)
    ts->m_agg_values->truncate (dst_index);
}
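/* A small worked example of the remapping above: if a clone has dropped the
   first of three original parameters, get_updated_indices is expected to
   produce the map {-1, 0, 1}.  A replacement recorded for original parameter
   1 is then kept with its index rewritten to 0, while one for original
   parameter 0 is discarded and the vector compacted, or freed entirely if
   nothing survives.  */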
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 ipcp_transformation *ts, bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_ts (ts), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  bool cleanup_eh ()
    { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  ipcp_transformation *m_ts;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT bit_offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &bit_offset, &size, &by_ref))
	continue;
      unsigned unit_offset = bit_offset / BITS_PER_UNIT;
      ipa_argagg_value_list avl (m_ts);
      tree v = avl.get_value (index, unit_offset, by_ref);

      if (!v
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
	bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
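/* For illustration, if IPA-CP recorded that unit offset 0 of the aggregate
   pointed to by parameter P always holds the constant 7, the walker above
   rewrites a matching load such as

     i_1 = p_2(D)->f;

   into

     i_1 = 7;

   provided the size and type of the load agree with the recorded constant
   (possibly via the NOP_EXPR or VIEW_CONVERT_EXPR conversions above).  */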
/* If IPA-CP discovered a constant in parameter PARM at OFFSET of a given SIZE
   - whether passed by reference or not is given by BY_REF - return that
   constant.  Otherwise return NULL_TREE.  This function is supposed to be used
   only after clone materialization and transformation is done (because it
   asserts that killed constants have been pruned).  */

tree
ipcp_get_aggregate_const (struct function *func, tree parm, bool by_ref,
			  HOST_WIDE_INT bit_offset, HOST_WIDE_INT bit_size)
{
  cgraph_node *node = cgraph_node::get (func->decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || !ts->m_agg_values)
    return NULL_TREE;

  int index = ts->get_param_index (func->decl, parm);
  if (index < 0)
    return NULL_TREE;

  ipa_argagg_value_list avl (ts);
  unsigned unit_offset = bit_offset / BITS_PER_UNIT;
  const ipa_argagg_value *av = avl.get_elt (index, unit_offset);
  if (!av || av->by_ref != by_ref)
    return NULL_TREE;
  gcc_assert (!av->killed);
  tree v = av->value;
  if (!v
      || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v))), bit_size))
    return NULL_TREE;

  return v;
}
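/* Illustrative use only (the identifiers are hypothetical): a pass that sees
   a 32-bit load from byte 4 of the aggregate behind parameter PARM of
   function FUNC could query the recorded constant like this:

     tree cst = ipcp_get_aggregate_const (func, parm, true,
					  4 * BITS_PER_UNIT, 32);
     if (cst)
       ... use CST instead of keeping the load ...
*/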
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts
      || vec_safe_length (ts->m_vr) == 0
      || !irange::supports_p (TREE_TYPE (parm)))
    return false;

  int i = ts->get_param_index (current_function_decl, parm);
  if (i < 0)
    return false;
  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  if (!vr[i].known_p ())
    return false;
  Value_Range tmp;
  vr[i].get_vrange (tmp);
  if (tmp.undefined_p () || tmp.varying_p ())
    return false;
  irange &r = as_a<irange> (tmp);
  irange_bitmask bm = r.get_bitmask ();
  *mask = widest_int::from (bm.mask (), TYPE_SIGN (TREE_TYPE (parm)));
  *value = wide_int_to_tree (TREE_TYPE (parm), bm.value ());
  return true;
}
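/* A worked example of the VALUE/MASK pair set above, following the
   irange_bitmask convention that a set bit in the mask means the
   corresponding bit of the parameter is unknown: *mask == 0x3 with
   *value == 0x4 describes a parameter whose two low bits vary, whose bit 2
   is known to be 1 and whose remaining bits are known to be 0, i.e. the
   values 4 through 7.  */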
/* Update value range of formal parameters of NODE as described in TS.  */

static void
ipcp_update_vr (struct cgraph_node *node, ipcp_transformation *ts)
{
  if (vec_safe_is_empty (ts->m_vr))
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec<tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known_p ())
	{
	  Value_Range tmp;
	  vr[i].get_vrange (tmp);

	  if (!tmp.undefined_p () && !tmp.varying_p ())
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  tmp.dump (dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, tmp);

	      if (POINTER_TYPE_P (TREE_TYPE (parm))
		  && opt_for_fn (node->decl, flag_ipa_bit_cp))
		{
		  irange &r = as_a<irange> (tmp);
		  irange_bitmask bm = r.get_bitmask ();
		  unsigned tem = bm.mask ().to_uhwi ();
		  unsigned HOST_WIDE_INT bitpos = bm.value ().to_uhwi ();
		  unsigned align = tem & -tem;
		  unsigned misalign = bitpos & (align - 1);

		  if (align > 1)
		    {
		      if (dump_file)
			{
			  fprintf (dump_file,
				   "Adjusting mask for param %u to ", i);
			  print_hex (bm.mask (), dump_file);
			  fprintf (dump_file, "\n");
			  fprintf (dump_file,
				   "Adjusting align: %u, misalign: %u\n",
				   align, misalign);
			}

		      unsigned old_align, old_misalign;
		      struct ptr_info_def *pi = get_ptr_info (ddef);
		      bool old_known = get_ptr_info_alignment (pi, &old_align,
							       &old_misalign);

		      if (old_known && old_align > align)
			{
			  if (dump_file)
			    {
			      fprintf (dump_file,
				       "But alignment was already %u.\n",
				       old_align);
			      if ((old_misalign & (align - 1)) != misalign)
				fprintf (dump_file,
					 "old_misalign (%u) and misalign "
					 "(%u) mismatch\n",
					 old_misalign, misalign);
			    }
			  continue;
			}

		      if (dump_file
			  && old_known
			  && ((misalign & (old_align - 1)) != old_misalign))
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) "
				 "mismatch\n",
				 old_misalign, misalign);

		      set_ptr_info_alignment (pi, align, misalign);
		    }
		}
	      else if (dump_file && INTEGRAL_TYPE_P (TREE_TYPE (parm)))
		{
		  irange &r = as_a<irange> (tmp);
		  irange_bitmask bm = r.get_bitmask ();
		  unsigned prec = TYPE_PRECISION (TREE_TYPE (parm));
		  if (wi::ne_p (bm.mask (), wi::shwi (-1, prec)))
		    {
		      fprintf (dump_file,
			       "Adjusting mask for param %u to ", i);
		      print_hex (bm.mask (), dump_file);
		      fprintf (dump_file, "\n");
		    }
		}
	    }
	}
    }
}
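/* A worked example of the alignment arithmetic above: if the mask of a
   pointer parameter is ...111000 (only the low three bits are known) and the
   known bits have value 0b100, then ALIGN = mask & -mask = 8 and
   MISALIGN = 0b100 & 7 = 4, i.e. the pointer is known to be congruent to 4
   modulo 8, which is exactly what set_ptr_info_alignment records.  */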
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts
      || (vec_safe_is_empty (ts->m_agg_values)
	  && vec_safe_is_empty (ts->m_vr)))
    return 0;

  ts->maybe_create_parm_idx_map (cfun->decl);
  ipcp_update_vr (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;

  adjust_agg_replacement_values (node, ts);
  if (vec_safe_is_empty (ts->m_agg_values))
    {
      if (dump_file)
	fprintf (dump_file, "  All affected aggregate parameters were either "
		 "removed or converted into scalars, phase done.\n");
      return 0;
    }
  if (dump_file)
    {
      fprintf (dump_file, "     Aggregate replacements:");
      ipa_argagg_value_list avs (ts);
      avs.dump (dump_file);
    }

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, ts, &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  bool cfg_changed = walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ts->remove_argaggs_if ([](const ipa_argagg_value &v)
    {
      return v.killed;
    });

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
/* Record that current function return value range is VAL.  */

void
ipa_record_return_value_range (Value_Range val)
{
  cgraph_node *n = cgraph_node::get (current_function_decl);
  if (!ipa_return_value_sum)
    {
      if (!ipa_vr_hash_table)
	ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
      ipa_return_value_sum = new (ggc_alloc_no_dtor<ipa_return_value_sum_t> ())
	ipa_return_value_sum_t (symtab, true);
      ipa_return_value_sum->disable_insertion_hook ();
    }
  ipa_return_value_sum->get_create (n)->vr = ipa_get_value_range (val);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Recording return range ");
      val.dump (dump_file);
      fprintf (dump_file, "\n");
    }
}
/* Return true if value range of DECL is known and if so initialize RANGE.  */

bool
ipa_return_value_range (Value_Range &range, tree decl)
{
  cgraph_node *n = cgraph_node::get (decl);
  if (!n || !ipa_return_value_sum)
    return false;
  enum availability avail;
  n = n->ultimate_alias_target (&avail);
  if (avail < AVAIL_AVAILABLE)
    return false;
  if (n->decl != decl
      && !useless_type_conversion_p (TREE_TYPE (decl), TREE_TYPE (n->decl)))
    return false;
  ipa_return_value_summary *v = ipa_return_value_sum->get (n);
  if (!v)
    return false;
  v->vr->get_vrange (range);
  return true;
}
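/* Illustrative use only (FNDECL is hypothetical): a caller folding a call
   statement could query the recorded return range like this:

     Value_Range rv (TREE_TYPE (TREE_TYPE (fndecl)));
     if (ipa_return_value_range (rv, fndecl))
       ... intersect the range of the call's LHS with RV ...
*/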
/* Reset all state within ipa-prop.cc so that we can rerun the compiler
   within the same process.  For use by toplev::finalize.  */

void
ipa_prop_cc_finalize (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;

  if (ipa_edge_args_sum)
    ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;

  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}

#include "gt-ipa-prop.h"