/* Interprocedural analyses.
   Copyright (C) 2005-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
60 /* Function summary where the parameter infos are actually stored. */
61 ipa_node_params_t
*ipa_node_params_sum
= NULL
;
63 function_summary
<ipcp_transformation
*> *ipcp_transformation_sum
= NULL
;
65 /* Edge summary for IPA-CP edge information. */
66 ipa_edge_args_sum_t
*ipa_edge_args_sum
;
68 /* Traits for a hash table for reusing already existing ipa_bits. */
70 struct ipa_bit_ggc_hash_traits
: public ggc_cache_remove
<ipa_bits
*>
72 typedef ipa_bits
*value_type
;
73 typedef ipa_bits
*compare_type
;
75 hash (const ipa_bits
*p
)
77 hashval_t t
= (hashval_t
) p
->value
.to_shwi ();
78 return iterative_hash_host_wide_int (p
->mask
.to_shwi (), t
);
81 equal (const ipa_bits
*a
, const ipa_bits
*b
)
83 return a
->value
== b
->value
&& a
->mask
== b
->mask
;
85 static const bool empty_zero_p
= true;
87 mark_empty (ipa_bits
*&p
)
92 is_empty (const ipa_bits
*p
)
97 is_deleted (const ipa_bits
*p
)
99 return p
== reinterpret_cast<const ipa_bits
*> (1);
102 mark_deleted (ipa_bits
*&p
)
104 p
= reinterpret_cast<ipa_bits
*> (1);
108 /* Hash table for avoid repeated allocations of equal ipa_bits. */
109 static GTY ((cache
)) hash_table
<ipa_bit_ggc_hash_traits
> *ipa_bits_hash_table
;
111 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
112 the equiv bitmap is not hashed and is expected to be NULL. */
114 struct ipa_vr_ggc_hash_traits
: public ggc_cache_remove
<value_range
*>
116 typedef value_range
*value_type
;
117 typedef value_range
*compare_type
;
119 hash (const value_range
*p
)
121 inchash::hash
hstate (p
->kind ());
122 inchash::add_expr (p
->min (), hstate
);
123 inchash::add_expr (p
->max (), hstate
);
124 return hstate
.end ();
127 equal (const value_range
*a
, const value_range
*b
)
129 return (types_compatible_p (a
->type (), b
->type ())
132 static const bool empty_zero_p
= true;
134 mark_empty (value_range
*&p
)
139 is_empty (const value_range
*p
)
144 is_deleted (const value_range
*p
)
146 return p
== reinterpret_cast<const value_range
*> (1);
149 mark_deleted (value_range
*&p
)
151 p
= reinterpret_cast<value_range
*> (1);
155 /* Hash table for avoid repeated allocations of equal value_ranges. */
156 static GTY ((cache
)) hash_table
<ipa_vr_ggc_hash_traits
> *ipa_vr_hash_table
;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
161 /* Description of a reference to an IPA constant. */
162 struct ipa_cst_ref_desc
164 /* Edge that corresponds to the statement which took the reference. */
165 struct cgraph_edge
*cs
;
166 /* Linked list of duplicates created when call graph edges are cloned. */
167 struct ipa_cst_ref_desc
*next_duplicate
;
168 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
169 if out of control. */
173 /* Allocation pool for reference descriptions. */
175 static object_allocator
<ipa_cst_ref_desc
> ipa_refdesc_pool
176 ("IPA-PROP ref descriptions");
178 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
182 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
184 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
188 return !opt_for_fn (node
->decl
, optimize
) || !opt_for_fn (node
->decl
, flag_ipa_cp
);
191 /* Return index of the formal whose tree is PTREE in function which corresponds
195 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
200 count
= vec_safe_length (descriptors
);
201 for (i
= 0; i
< count
; i
++)
202 if ((*descriptors
)[i
].decl_or_type
== ptree
)
208 /* Return index of the formal whose tree is PTREE in function which corresponds
212 ipa_get_param_decl_index (class ipa_node_params
*info
, tree ptree
)
214 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
217 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
221 ipa_populate_param_decls (struct cgraph_node
*node
,
222 vec
<ipa_param_descriptor
, va_gc
> &descriptors
)
230 gcc_assert (gimple_has_body_p (fndecl
));
231 fnargs
= DECL_ARGUMENTS (fndecl
);
233 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
235 descriptors
[param_num
].decl_or_type
= parm
;
236 unsigned int cost
= estimate_move_cost (TREE_TYPE (parm
), true);
237 descriptors
[param_num
].move_cost
= cost
;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost
== descriptors
[param_num
].move_cost
);
244 /* Return how many formal parameters FNDECL has. */
247 count_formal_params (tree fndecl
)
251 gcc_assert (gimple_has_body_p (fndecl
));
253 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
259 /* Return the declaration of Ith formal parameter of the function corresponding
260 to INFO. Note there is no setter function as this array is built just once
261 using ipa_initialize_node_params. */
264 ipa_dump_param (FILE *file
, class ipa_node_params
*info
, int i
)
266 fprintf (file
, "param #%i", i
);
267 if ((*info
->descriptors
)[i
].decl_or_type
)
270 print_generic_expr (file
, (*info
->descriptors
)[i
].decl_or_type
);
274 /* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
278 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
280 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
282 if (!info
->descriptors
&& param_count
)
284 vec_safe_grow_cleared (info
->descriptors
, param_count
, true);
291 /* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
296 ipa_initialize_node_params (struct cgraph_node
*node
)
298 ipa_node_params
*info
= ipa_node_params_sum
->get_create (node
);
300 if (!info
->descriptors
301 && ipa_alloc_node_params (node
, count_formal_params (node
->decl
)))
302 ipa_populate_param_decls (node
, *info
->descriptors
);
305 /* Print the jump functions associated with call graph edge CS to file F. */
308 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
310 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
311 int count
= ipa_get_cs_argument_count (args
);
313 for (int i
= 0; i
< count
; i
++)
315 struct ipa_jump_func
*jump_func
;
316 enum jump_func_type type
;
318 jump_func
= ipa_get_ith_jump_func (args
, i
);
319 type
= jump_func
->type
;
321 fprintf (f
, " param %d: ", i
);
322 if (type
== IPA_JF_UNKNOWN
)
323 fprintf (f
, "UNKNOWN\n");
324 else if (type
== IPA_JF_CONST
)
326 tree val
= jump_func
->value
.constant
.value
;
327 fprintf (f
, "CONST: ");
328 print_generic_expr (f
, val
);
329 if (TREE_CODE (val
) == ADDR_EXPR
330 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
333 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)));
337 else if (type
== IPA_JF_PASS_THROUGH
)
339 fprintf (f
, "PASS THROUGH: ");
340 fprintf (f
, "%d, op %s",
341 jump_func
->value
.pass_through
.formal_id
,
342 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
343 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
346 print_generic_expr (f
, jump_func
->value
.pass_through
.operand
);
348 if (jump_func
->value
.pass_through
.agg_preserved
)
349 fprintf (f
, ", agg_preserved");
352 else if (type
== IPA_JF_ANCESTOR
)
354 fprintf (f
, "ANCESTOR: ");
355 fprintf (f
, "%d, offset " HOST_WIDE_INT_PRINT_DEC
,
356 jump_func
->value
.ancestor
.formal_id
,
357 jump_func
->value
.ancestor
.offset
);
358 if (jump_func
->value
.ancestor
.agg_preserved
)
359 fprintf (f
, ", agg_preserved");
360 if (jump_func
->value
.ancestor
.keep_null
)
361 fprintf (f
, ", keep_null");
365 if (jump_func
->agg
.items
)
367 struct ipa_agg_jf_item
*item
;
370 fprintf (f
, " Aggregate passed by %s:\n",
371 jump_func
->agg
.by_ref
? "reference" : "value");
372 FOR_EACH_VEC_ELT (*jump_func
->agg
.items
, j
, item
)
374 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
376 fprintf (f
, "type: ");
377 print_generic_expr (f
, item
->type
);
379 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
380 fprintf (f
, "PASS THROUGH: %d,",
381 item
->value
.pass_through
.formal_id
);
382 else if (item
->jftype
== IPA_JF_LOAD_AGG
)
384 fprintf (f
, "LOAD AGG: %d",
385 item
->value
.pass_through
.formal_id
);
386 fprintf (f
, " [offset: " HOST_WIDE_INT_PRINT_DEC
", by %s],",
387 item
->value
.load_agg
.offset
,
388 item
->value
.load_agg
.by_ref
? "reference"
392 if (item
->jftype
== IPA_JF_PASS_THROUGH
393 || item
->jftype
== IPA_JF_LOAD_AGG
)
395 fprintf (f
, " op %s",
396 get_tree_code_name (item
->value
.pass_through
.operation
));
397 if (item
->value
.pass_through
.operation
!= NOP_EXPR
)
400 print_generic_expr (f
, item
->value
.pass_through
.operand
);
403 else if (item
->jftype
== IPA_JF_CONST
)
405 fprintf (f
, "CONST: ");
406 print_generic_expr (f
, item
->value
.constant
);
408 else if (item
->jftype
== IPA_JF_UNKNOWN
)
409 fprintf (f
, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC
" bits",
410 tree_to_uhwi (TYPE_SIZE (item
->type
)));
415 class ipa_polymorphic_call_context
*ctx
416 = ipa_get_ith_polymorhic_call_context (args
, i
);
417 if (ctx
&& !ctx
->useless_p ())
419 fprintf (f
, " Context: ");
420 ctx
->dump (dump_file
);
425 fprintf (f
, " value: ");
426 print_hex (jump_func
->bits
->value
, f
);
427 fprintf (f
, ", mask: ");
428 print_hex (jump_func
->bits
->mask
, f
);
432 fprintf (f
, " Unknown bits\n");
438 (jump_func
->m_vr
->kind () == VR_ANTI_RANGE
) ? "~" : "");
439 print_decs (wi::to_wide (jump_func
->m_vr
->min ()), f
);
441 print_decs (wi::to_wide (jump_func
->m_vr
->max ()), f
);
445 fprintf (f
, " Unknown VR\n");
450 /* Print the jump functions of all arguments on all call graph edges going from
454 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
456 struct cgraph_edge
*cs
;
458 fprintf (f
, " Jump functions of caller %s:\n", node
->dump_name ());
459 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
462 fprintf (f
, " callsite %s -> %s : \n",
464 cs
->callee
->dump_name ());
465 if (!ipa_edge_args_info_available_for_edge_p (cs
))
466 fprintf (f
, " no arg info\n");
468 ipa_print_node_jump_functions_for_edge (f
, cs
);
471 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
473 class cgraph_indirect_call_info
*ii
;
475 ii
= cs
->indirect_info
;
476 if (ii
->agg_contents
)
477 fprintf (f
, " indirect %s callsite, calling param %i, "
478 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
479 ii
->member_ptr
? "member ptr" : "aggregate",
480 ii
->param_index
, ii
->offset
,
481 ii
->by_ref
? "by reference" : "by_value");
483 fprintf (f
, " indirect %s callsite, calling param %i, "
484 "offset " HOST_WIDE_INT_PRINT_DEC
,
485 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
490 fprintf (f
, ", for stmt ");
491 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
496 ii
->context
.dump (f
);
497 if (!ipa_edge_args_info_available_for_edge_p (cs
))
498 fprintf (f
, " no arg info\n");
500 ipa_print_node_jump_functions_for_edge (f
, cs
);
504 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
507 ipa_print_all_jump_functions (FILE *f
)
509 struct cgraph_node
*node
;
511 fprintf (f
, "\nJump functions:\n");
512 FOR_EACH_FUNCTION (node
)
514 ipa_print_node_jump_functions (f
, node
);
518 /* Set jfunc to be a know-really nothing jump function. */
521 ipa_set_jf_unknown (struct ipa_jump_func
*jfunc
)
523 jfunc
->type
= IPA_JF_UNKNOWN
;
526 /* Set JFUNC to be a copy of another jmp (to be used by jump function
527 combination code). The two functions will share their rdesc. */
530 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
531 struct ipa_jump_func
*src
)
534 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
535 dst
->type
= IPA_JF_CONST
;
536 dst
->value
.constant
= src
->value
.constant
;
539 /* Set JFUNC to be a constant jmp function. */
542 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
543 struct cgraph_edge
*cs
)
545 jfunc
->type
= IPA_JF_CONST
;
546 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
548 if (TREE_CODE (constant
) == ADDR_EXPR
549 && (TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
550 || (TREE_CODE (TREE_OPERAND (constant
, 0)) == VAR_DECL
551 && TREE_STATIC (TREE_OPERAND (constant
, 0)))))
553 struct ipa_cst_ref_desc
*rdesc
;
555 rdesc
= ipa_refdesc_pool
.allocate ();
557 rdesc
->next_duplicate
= NULL
;
559 jfunc
->value
.constant
.rdesc
= rdesc
;
562 jfunc
->value
.constant
.rdesc
= NULL
;
565 /* Set JFUNC to be a simple pass-through jump function. */
567 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
570 jfunc
->type
= IPA_JF_PASS_THROUGH
;
571 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
572 jfunc
->value
.pass_through
.formal_id
= formal_id
;
573 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
574 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
577 /* Set JFUNC to be an unary pass through jump function. */
580 ipa_set_jf_unary_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
581 enum tree_code operation
)
583 jfunc
->type
= IPA_JF_PASS_THROUGH
;
584 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
585 jfunc
->value
.pass_through
.formal_id
= formal_id
;
586 jfunc
->value
.pass_through
.operation
= operation
;
587 jfunc
->value
.pass_through
.agg_preserved
= false;
589 /* Set JFUNC to be an arithmetic pass through jump function. */
592 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
593 tree operand
, enum tree_code operation
)
595 jfunc
->type
= IPA_JF_PASS_THROUGH
;
596 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
597 jfunc
->value
.pass_through
.formal_id
= formal_id
;
598 jfunc
->value
.pass_through
.operation
= operation
;
599 jfunc
->value
.pass_through
.agg_preserved
= false;
602 /* Set JFUNC to be an ancestor jump function. */
605 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
606 int formal_id
, bool agg_preserved
, bool keep_null
)
608 jfunc
->type
= IPA_JF_ANCESTOR
;
609 jfunc
->value
.ancestor
.formal_id
= formal_id
;
610 jfunc
->value
.ancestor
.offset
= offset
;
611 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
612 jfunc
->value
.ancestor
.keep_null
= keep_null
;
615 /* Get IPA BB information about the given BB. FBI is the context of analysis
616 of this function body. */
618 static struct ipa_bb_info
*
619 ipa_get_bb_info (struct ipa_func_body_info
*fbi
, basic_block bb
)
621 gcc_checking_assert (fbi
);
622 return &fbi
->bb_infos
[bb
->index
];
625 /* Structure to be passed in between detect_type_change and
626 check_stmt_for_type_change. */
628 struct prop_type_change_info
630 /* Offset into the object where there is the virtual method pointer we are
632 HOST_WIDE_INT offset
;
633 /* The declaration or SSA_NAME pointer of the base that we are checking for
636 /* Set to true if dynamic type change has been detected. */
637 bool type_maybe_changed
;
640 /* Return true if STMT can modify a virtual method table pointer.
642 This function makes special assumptions about both constructors and
643 destructors which are all the functions that are allowed to alter the VMT
644 pointers. It assumes that destructors begin with assignment into all VMT
645 pointers and that constructors essentially look in the following way:
647 1) The very first thing they do is that they call constructors of ancestor
648 sub-objects that have them.
650 2) Then VMT pointers of this and all its ancestors is set to new values
651 corresponding to the type corresponding to the constructor.
653 3) Only afterwards, other stuff such as constructor of member sub-objects
654 and the code written by the user is run. Only this may include calling
655 virtual functions, directly or indirectly.
657 There is no way to call a constructor of an ancestor sub-object in any
660 This means that we do not have to care whether constructors get the correct
661 type information because they will always change it (in fact, if we define
662 the type to be given by the VMT pointer, it is undefined).
664 The most important fact to derive from the above is that if, for some
665 statement in the section 3, we try to detect whether the dynamic type has
666 changed, we can safely ignore all calls as we examine the function body
667 backwards until we reach statements in section 2 because these calls cannot
668 be ancestor constructors or destructors (if the input is not bogus) and so
669 do not change the dynamic type (this holds true only for automatically
670 allocated objects but at the moment we devirtualize only these). We then
671 must detect that statements in section 2 change the dynamic type and can try
672 to derive the new type. That is enough and we can stop, we will never see
673 the calls into constructors of sub-objects in this code. Therefore we can
674 safely ignore all call statements that we traverse.
678 stmt_may_be_vtbl_ptr_store (gimple
*stmt
)
680 if (is_gimple_call (stmt
))
682 if (gimple_clobber_p (stmt
))
684 else if (is_gimple_assign (stmt
))
686 tree lhs
= gimple_assign_lhs (stmt
);
688 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
690 if (flag_strict_aliasing
691 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
694 if (TREE_CODE (lhs
) == COMPONENT_REF
695 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
697 /* In the future we might want to use get_ref_base_and_extent to find
698 if there is a field corresponding to the offset and if so, proceed
699 almost like if it was a component ref. */
705 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
706 to check whether a particular statement may modify the virtual table
707 pointer.  It stores its result into DATA, which points to a
708 prop_type_change_info structure. */
711 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
713 gimple
*stmt
= SSA_NAME_DEF_STMT (vdef
);
714 struct prop_type_change_info
*tci
= (struct prop_type_change_info
*) data
;
716 if (stmt_may_be_vtbl_ptr_store (stmt
))
718 tci
->type_maybe_changed
= true;
725 /* See if ARG is PARAM_DECl describing instance passed by pointer
726 or reference in FUNCTION. Return false if the dynamic type may change
727 in between the beginning of the function and the point where CALL is invoked.
729 Generally functions are not allowed to change type of such instances,
730 but they call destructors. We assume that methods cannot destroy the THIS
731 pointer. Also as a special cases, constructor and destructors may change
732 type of the THIS pointer. */
735 param_type_may_change_p (tree function
, tree arg
, gimple
*call
)
737 /* Pure functions cannot do any changes on the dynamic type;
738 that would require writing to memory. */
739 if (flags_from_decl_or_type (function
) & (ECF_PURE
| ECF_CONST
))
741 /* We need to check if we are within inlined constructor
742 or destructor (ideally we would have way to check that the
743 inline cdtor is actually working on ARG, but we don't have
744 easy tie on this, so punt on all non-pure cdtors.
745 We may also record the types of cdtors and once we know type
746 of the instance match them.
748 Also code unification optimizations may merge calls from
749 different blocks making return values unreliable. So
750 do nothing during late optimization. */
751 if (DECL_STRUCT_FUNCTION (function
)->after_inlining
)
753 if (TREE_CODE (arg
) == SSA_NAME
754 && SSA_NAME_IS_DEFAULT_DEF (arg
)
755 && TREE_CODE (SSA_NAME_VAR (arg
)) == PARM_DECL
)
757 /* Normal (non-THIS) argument. */
758 if ((SSA_NAME_VAR (arg
) != DECL_ARGUMENTS (function
)
759 || TREE_CODE (TREE_TYPE (function
)) != METHOD_TYPE
)
760 /* THIS pointer of an method - here we want to watch constructors
761 and destructors as those definitely may change the dynamic
763 || (TREE_CODE (TREE_TYPE (function
)) == METHOD_TYPE
764 && !DECL_CXX_CONSTRUCTOR_P (function
)
765 && !DECL_CXX_DESTRUCTOR_P (function
)
766 && (SSA_NAME_VAR (arg
) == DECL_ARGUMENTS (function
))))
768 /* Walk the inline stack and watch out for ctors/dtors. */
769 for (tree block
= gimple_block (call
); block
&& TREE_CODE (block
) == BLOCK
;
770 block
= BLOCK_SUPERCONTEXT (block
))
771 if (inlined_polymorphic_ctor_dtor_block_p (block
, false))
779 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
780 callsite CALL) by looking for assignments to its virtual table pointer. If
781 it is, return true. ARG is the object itself (not a pointer
782 to it, unless dereferenced). BASE is the base of the memory access as
783 returned by get_ref_base_and_extent, as is the offset.
785 This is helper function for detect_type_change and detect_type_change_ssa
786 that does the heavy work which is usually unnecessary. */
789 detect_type_change_from_memory_writes (ipa_func_body_info
*fbi
, tree arg
,
790 tree base
, tree comp_type
, gcall
*call
,
791 HOST_WIDE_INT offset
)
793 struct prop_type_change_info tci
;
796 gcc_checking_assert (DECL_P (arg
)
797 || TREE_CODE (arg
) == MEM_REF
798 || handled_component_p (arg
));
800 comp_type
= TYPE_MAIN_VARIANT (comp_type
);
802 /* Const calls cannot call virtual methods through VMT and so type changes do
804 if (!flag_devirtualize
|| !gimple_vuse (call
)
805 /* Be sure expected_type is polymorphic. */
807 || TREE_CODE (comp_type
) != RECORD_TYPE
808 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))
809 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type
))))
812 if (fbi
->aa_walk_budget
== 0)
815 ao_ref_init (&ao
, arg
);
818 ao
.size
= POINTER_SIZE
;
819 ao
.max_size
= ao
.size
;
822 tci
.object
= get_base_address (arg
);
823 tci
.type_maybe_changed
= false;
826 = walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
827 &tci
, NULL
, NULL
, fbi
->aa_walk_budget
);
829 fbi
->aa_walk_budget
-= walked
;
831 fbi
->aa_walk_budget
= 0;
833 if (walked
>= 0 && !tci
.type_maybe_changed
)
839 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
840 If it is, return true. ARG is the object itself (not a pointer
841 to it, unless dereferenced). BASE is the base of the memory access as
842 returned by get_ref_base_and_extent, as is the offset. */
845 detect_type_change (ipa_func_body_info
*fbi
, tree arg
, tree base
,
846 tree comp_type
, gcall
*call
,
847 HOST_WIDE_INT offset
)
849 if (!flag_devirtualize
)
852 if (TREE_CODE (base
) == MEM_REF
853 && !param_type_may_change_p (current_function_decl
,
854 TREE_OPERAND (base
, 0),
857 return detect_type_change_from_memory_writes (fbi
, arg
, base
, comp_type
,
861 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
862 SSA name (its dereference will become the base and the offset is assumed to
866 detect_type_change_ssa (ipa_func_body_info
*fbi
, tree arg
, tree comp_type
,
869 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
870 if (!flag_devirtualize
871 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
874 if (!param_type_may_change_p (current_function_decl
, arg
, call
))
877 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
878 build_int_cst (ptr_type_node
, 0));
880 return detect_type_change_from_memory_writes (fbi
, arg
, arg
, comp_type
,
884 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
885 boolean variable pointed to by DATA. */
888 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
891 bool *b
= (bool *) data
;
896 /* Find the nearest valid aa status for parameter specified by INDEX that
899 static struct ipa_param_aa_status
*
900 find_dominating_aa_status (struct ipa_func_body_info
*fbi
, basic_block bb
,
905 bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
908 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
909 if (!bi
->param_aa_statuses
.is_empty ()
910 && bi
->param_aa_statuses
[index
].valid
)
911 return &bi
->param_aa_statuses
[index
];
915 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
916 structures and/or initialize the result with a dominating description as
919 static struct ipa_param_aa_status
*
920 parm_bb_aa_status_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
,
923 gcc_checking_assert (fbi
);
924 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
925 if (bi
->param_aa_statuses
.is_empty ())
926 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
, true);
927 struct ipa_param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
930 gcc_checking_assert (!paa
->parm_modified
931 && !paa
->ref_modified
932 && !paa
->pt_modified
);
933 struct ipa_param_aa_status
*dom_paa
;
934 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
944 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
945 a value known not to be modified in this function before reaching the
946 statement STMT. FBI holds information about the function we have so far
947 gathered but do not survive the summary building stage. */
950 parm_preserved_before_stmt_p (struct ipa_func_body_info
*fbi
, int index
,
951 gimple
*stmt
, tree parm_load
)
953 struct ipa_param_aa_status
*paa
;
954 bool modified
= false;
957 tree base
= get_base_address (parm_load
);
958 gcc_assert (TREE_CODE (base
) == PARM_DECL
);
959 if (TREE_READONLY (base
))
962 gcc_checking_assert (fbi
);
963 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
964 if (paa
->parm_modified
|| fbi
->aa_walk_budget
== 0)
967 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
968 ao_ref_init (&refd
, parm_load
);
969 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
970 &modified
, NULL
, NULL
,
971 fbi
->aa_walk_budget
);
975 fbi
->aa_walk_budget
= 0;
978 fbi
->aa_walk_budget
-= walked
;
980 paa
->parm_modified
= true;
984 /* If STMT is an assignment that loads a value from an parameter declaration,
985 return the index of the parameter in ipa_node_params which has not been
986 modified. Otherwise return -1. */
989 load_from_unmodified_param (struct ipa_func_body_info
*fbi
,
990 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
996 if (!gimple_assign_single_p (stmt
))
999 op1
= gimple_assign_rhs1 (stmt
);
1000 if (TREE_CODE (op1
) != PARM_DECL
)
1003 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
1005 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
1011 /* Return true if memory reference REF (which must be a load through parameter
1012 with INDEX) loads data that are known to be unmodified in this function
1013 before reaching statement STMT. */
1016 parm_ref_data_preserved_p (struct ipa_func_body_info
*fbi
,
1017 int index
, gimple
*stmt
, tree ref
)
1019 struct ipa_param_aa_status
*paa
;
1020 bool modified
= false;
1023 gcc_checking_assert (fbi
);
1024 paa
= parm_bb_aa_status_for_bb (fbi
, gimple_bb (stmt
), index
);
1025 if (paa
->ref_modified
|| fbi
->aa_walk_budget
== 0)
1028 gcc_checking_assert (gimple_vuse (stmt
));
1029 ao_ref_init (&refd
, ref
);
1030 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
,
1031 &modified
, NULL
, NULL
,
1032 fbi
->aa_walk_budget
);
1036 fbi
->aa_walk_budget
= 0;
1039 fbi
->aa_walk_budget
-= walked
;
1041 paa
->ref_modified
= true;
1045 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1046 is known to be unmodified in this function before reaching call statement
1047 CALL into which it is passed. FBI describes the function body. */
1050 parm_ref_data_pass_through_p (struct ipa_func_body_info
*fbi
, int index
,
1051 gimple
*call
, tree parm
)
1053 bool modified
= false;
1056 /* It's unnecessary to calculate anything about memory contents for a const
1057 function because it is not going to use it. But do not cache the result
1058 either. Also, no such calculations for non-pointers. */
1059 if (!gimple_vuse (call
)
1060 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
1063 struct ipa_param_aa_status
*paa
= parm_bb_aa_status_for_bb (fbi
,
1066 if (paa
->pt_modified
|| fbi
->aa_walk_budget
== 0)
1069 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
1070 int walked
= walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
,
1071 &modified
, NULL
, NULL
,
1072 fbi
->aa_walk_budget
);
1075 fbi
->aa_walk_budget
= 0;
1079 fbi
->aa_walk_budget
-= walked
;
1081 paa
->pt_modified
= true;
1085 /* Return true if we can prove that OP is a memory reference loading
1086 data from an aggregate passed as a parameter.
1088 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it return
1089 false if it cannot prove that the value has not been modified before the
1090 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1091 if it cannot prove the value has not been modified, in that case it will
1092 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1094 INFO and PARMS_AINFO describe parameters of the current function (but the
1095 latter can be NULL), STMT is the load statement. If function returns true,
1096 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1097 within the aggregate and whether it is a load from a value passed by
1098 reference respectively.
1100 Return false if the offset divided by BITS_PER_UNIT would not fit into an
1104 ipa_load_from_parm_agg (struct ipa_func_body_info
*fbi
,
1105 vec
<ipa_param_descriptor
, va_gc
> *descriptors
,
1106 gimple
*stmt
, tree op
, int *index_p
,
1107 HOST_WIDE_INT
*offset_p
, poly_int64
*size_p
,
1108 bool *by_ref_p
, bool *guaranteed_unmodified
)
1113 tree base
= get_ref_base_and_extent_hwi (op
, offset_p
, &size
, &reverse
);
1116 || (*offset_p
/ BITS_PER_UNIT
) > UINT_MAX
)
1119 /* We cannot propagate across volatile loads. */
1120 if (TREE_THIS_VOLATILE (op
))
1125 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
1127 && parm_preserved_before_stmt_p (fbi
, index
, stmt
, op
))
1133 if (guaranteed_unmodified
)
1134 *guaranteed_unmodified
= true;
1140 if (TREE_CODE (base
) != MEM_REF
1141 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
1142 || !integer_zerop (TREE_OPERAND (base
, 1)))
1145 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
1147 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
1148 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
1152 /* This branch catches situations where a pointer parameter is not a
1153 gimple register, for example:
1155 void hip7(S*) (struct S * p)
1157 void (*<T2e4>) (struct S *) D.1867;
1162 D.1867_2 = p.1_1->f;
1167 gimple
*def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
1168 index
= load_from_unmodified_param (fbi
, descriptors
, def
);
1173 bool data_preserved
= parm_ref_data_preserved_p (fbi
, index
, stmt
, op
);
1174 if (!data_preserved
&& !guaranteed_unmodified
)
1181 if (guaranteed_unmodified
)
1182 *guaranteed_unmodified
= data_preserved
;
1188 /* If STMT is an assignment that loads a value from a parameter declaration,
1189 or from an aggregate passed as the parameter either by value or reference,
1190 return the index of the parameter in ipa_node_params. Otherwise return -1.
1192 FBI holds gathered information about the function. INFO describes
1193 parameters of the function, STMT is the assignment statement. If it is a
1194 memory load from an aggregate, *OFFSET_P is filled with offset within the
1195 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1199 load_from_unmodified_param_or_agg (struct ipa_func_body_info
*fbi
,
1200 class ipa_node_params
*info
,
1202 HOST_WIDE_INT
*offset_p
,
1205 int index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1208 /* Load value from a parameter declaration. */
1215 if (!gimple_assign_load_p (stmt
))
1218 tree rhs
= gimple_assign_rhs1 (stmt
);
1220 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1221 for (tree t
= rhs
; handled_component_p (t
); t
= TREE_OPERAND (t
, 0))
1222 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
1225 /* Skip memory reference containing bit-field. */
1226 if (TREE_CODE (rhs
) == BIT_FIELD_REF
1227 || contains_bitfld_component_ref_p (rhs
))
1230 if (!ipa_load_from_parm_agg (fbi
, info
->descriptors
, stmt
, rhs
, &index
,
1231 offset_p
, &size
, by_ref_p
))
1234 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs
))),
1238 tree param_type
= ipa_get_type (info
, index
);
1240 if (!param_type
|| !AGGREGATE_TYPE_P (param_type
))
1243 else if (TREE_THIS_VOLATILE (rhs
))
1249 /* Walk pointer adjustemnts from OP (such as POINTER_PLUS and ADDR_EXPR)
1250 to find original pointer. Initialize RET to the pointer which results from
1252 If offset is known return true and initialize OFFSET_RET. */
1255 unadjusted_ptr_and_unit_offset (tree op
, tree
*ret
, poly_int64
*offset_ret
)
1257 poly_int64 offset
= 0;
1258 bool offset_known
= true;
1261 for (i
= 0; i
< param_ipa_jump_function_lookups
; i
++)
1263 if (TREE_CODE (op
) == ADDR_EXPR
)
1265 poly_int64 extra_offset
= 0;
1266 tree base
= get_addr_base_and_unit_offset (TREE_OPERAND (op
, 0),
1270 base
= get_base_address (TREE_OPERAND (op
, 0));
1271 if (TREE_CODE (base
) != MEM_REF
)
1273 offset_known
= false;
1277 if (TREE_CODE (base
) != MEM_REF
)
1279 offset
+= extra_offset
;
1281 op
= TREE_OPERAND (base
, 0);
1282 if (mem_ref_offset (base
).to_shwi (&extra_offset
))
1283 offset
+= extra_offset
;
1285 offset_known
= false;
1287 else if (TREE_CODE (op
) == SSA_NAME
1288 && !SSA_NAME_IS_DEFAULT_DEF (op
))
1290 gimple
*pstmt
= SSA_NAME_DEF_STMT (op
);
1292 if (gimple_assign_single_p (pstmt
))
1293 op
= gimple_assign_rhs1 (pstmt
);
1294 else if (is_gimple_assign (pstmt
)
1295 && gimple_assign_rhs_code (pstmt
) == POINTER_PLUS_EXPR
)
1297 poly_int64 extra_offset
= 0;
1298 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt
),
1300 offset
+= extra_offset
;
1302 offset_known
= false;
1303 op
= gimple_assign_rhs1 (pstmt
);
1312 *offset_ret
= offset
;
1313 return offset_known
;
1316 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1317 of an assignment statement STMT, try to determine whether we are actually
1318 handling any of the following cases and construct an appropriate jump
1319 function into JFUNC if so:
1321 1) The passed value is loaded from a formal parameter which is not a gimple
1322 register (most probably because it is addressable, the value has to be
1323 scalar) and we can guarantee the value has not changed. This case can
1324 therefore be described by a simple pass-through jump function. For example:
1333 2) The passed value can be described by a simple arithmetic pass-through
1340 D.2064_4 = a.1(D) + 4;
1343 This case can also occur in combination of the previous one, e.g.:
1351 D.2064_4 = a.0_3 + 4;
1354 3) The passed value is an address of an object within another one (which
1355 also passed by reference). Such situations are described by an ancestor
1356 jump function and describe situations such as:
1358 B::foo() (struct B * const this)
1362 D.1845_2 = &this_1(D)->D.1748;
1365 INFO is the structure describing individual parameters access different
1366 stages of IPA optimizations. PARMS_AINFO contains the information that is
1367 only needed for intraprocedural analysis. */
1370 compute_complex_assign_jump_func (struct ipa_func_body_info
*fbi
,
1371 class ipa_node_params
*info
,
1372 struct ipa_jump_func
*jfunc
,
1373 gcall
*call
, gimple
*stmt
, tree name
,
1376 HOST_WIDE_INT offset
, size
;
1377 tree op1
, tc_ssa
, base
, ssa
;
1381 op1
= gimple_assign_rhs1 (stmt
);
1383 if (TREE_CODE (op1
) == SSA_NAME
)
1385 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1386 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1388 index
= load_from_unmodified_param (fbi
, info
->descriptors
,
1389 SSA_NAME_DEF_STMT (op1
));
1394 index
= load_from_unmodified_param (fbi
, info
->descriptors
, stmt
);
1395 tc_ssa
= gimple_assign_lhs (stmt
);
1400 switch (gimple_assign_rhs_class (stmt
))
1402 case GIMPLE_BINARY_RHS
:
1404 tree op2
= gimple_assign_rhs2 (stmt
);
1405 if (!is_gimple_ip_invariant (op2
)
1406 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt
))
1408 && !useless_type_conversion_p (TREE_TYPE (name
),
1412 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1413 gimple_assign_rhs_code (stmt
));
1416 case GIMPLE_SINGLE_RHS
:
1418 bool agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
,
1420 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
1423 case GIMPLE_UNARY_RHS
:
1424 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
)))
1425 ipa_set_jf_unary_pass_through (jfunc
, index
,
1426 gimple_assign_rhs_code (stmt
));
1432 if (TREE_CODE (op1
) != ADDR_EXPR
)
1434 op1
= TREE_OPERAND (op1
, 0);
1435 base
= get_ref_base_and_extent_hwi (op1
, &offset
, &size
, &reverse
);
1436 offset_int mem_offset
;
1438 || TREE_CODE (base
) != MEM_REF
1439 || !mem_ref_offset (base
).is_constant (&mem_offset
))
1441 offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1442 ssa
= TREE_OPERAND (base
, 0);
1443 if (TREE_CODE (ssa
) != SSA_NAME
1444 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1448 /* Dynamic types are changed in constructors and destructors. */
1449 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1450 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1451 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1452 parm_ref_data_pass_through_p (fbi
, index
, call
, ssa
),
1456 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1459 iftmp.1_3 = &obj_2(D)->D.1762;
1461 The base of the MEM_REF must be a default definition SSA NAME of a
1462 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1463 whole MEM_REF expression is returned and the offset calculated from any
1464 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1465 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1468 get_ancestor_addr_info (gimple
*assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1471 tree expr
, parm
, obj
;
1474 if (!gimple_assign_single_p (assign
))
1476 expr
= gimple_assign_rhs1 (assign
);
1478 if (TREE_CODE (expr
) != ADDR_EXPR
)
1480 expr
= TREE_OPERAND (expr
, 0);
1482 expr
= get_ref_base_and_extent_hwi (expr
, offset
, &size
, &reverse
);
1484 offset_int mem_offset
;
1486 || TREE_CODE (expr
) != MEM_REF
1487 || !mem_ref_offset (expr
).is_constant (&mem_offset
))
1489 parm
= TREE_OPERAND (expr
, 0);
1490 if (TREE_CODE (parm
) != SSA_NAME
1491 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1492 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1495 *offset
+= mem_offset
.to_short_addr () * BITS_PER_UNIT
;
1501 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1502 statement PHI, try to find out whether NAME is in fact a
1503 multiple-inheritance typecast from a descendant into an ancestor of a formal
1504 parameter and thus can be described by an ancestor jump function and if so,
1505 write the appropriate function into JFUNC.
1507 Essentially we want to match the following pattern:
1515 iftmp.1_3 = &obj_2(D)->D.1762;
1518 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1519 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1523 compute_complex_ancestor_jump_func (struct ipa_func_body_info
*fbi
,
1524 class ipa_node_params
*info
,
1525 struct ipa_jump_func
*jfunc
,
1526 gcall
*call
, gphi
*phi
)
1528 HOST_WIDE_INT offset
;
1529 gimple
*assign
, *cond
;
1530 basic_block phi_bb
, assign_bb
, cond_bb
;
1531 tree tmp
, parm
, expr
, obj
;
1534 if (gimple_phi_num_args (phi
) != 2)
1537 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1538 tmp
= PHI_ARG_DEF (phi
, 0);
1539 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1540 tmp
= PHI_ARG_DEF (phi
, 1);
1543 if (TREE_CODE (tmp
) != SSA_NAME
1544 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1545 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1546 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1549 assign
= SSA_NAME_DEF_STMT (tmp
);
1550 assign_bb
= gimple_bb (assign
);
1551 if (!single_pred_p (assign_bb
))
1553 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1556 parm
= TREE_OPERAND (expr
, 0);
1557 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1561 cond_bb
= single_pred (assign_bb
);
1562 cond
= last_stmt (cond_bb
);
1564 || gimple_code (cond
) != GIMPLE_COND
1565 || gimple_cond_code (cond
) != NE_EXPR
1566 || gimple_cond_lhs (cond
) != parm
1567 || !integer_zerop (gimple_cond_rhs (cond
)))
1570 phi_bb
= gimple_bb (phi
);
1571 for (i
= 0; i
< 2; i
++)
1573 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1574 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1578 ipa_set_ancestor_jf (jfunc
, offset
, index
,
1579 parm_ref_data_pass_through_p (fbi
, index
, call
, parm
),
1583 /* Inspect the given TYPE and return true iff it has the same structure (the
1584 same number of fields of the same types) as a C++ member pointer. If
1585 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1586 corresponding fields there. */
1589 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1593 if (TREE_CODE (type
) != RECORD_TYPE
)
1596 fld
= TYPE_FIELDS (type
);
1597 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1598 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1599 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1605 fld
= DECL_CHAIN (fld
);
1606 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1607 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1612 if (DECL_CHAIN (fld
))
1618 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1619 return the rhs of its defining statement, and this statement is stored in
1620 *RHS_STMT. Otherwise return RHS as it is. */
1623 get_ssa_def_if_simple_copy (tree rhs
, gimple
**rhs_stmt
)
1625 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1627 gimple
*def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1629 if (gimple_assign_single_p (def_stmt
))
1630 rhs
= gimple_assign_rhs1 (def_stmt
);
1633 *rhs_stmt
= def_stmt
;
1638 /* Simple linked list, describing contents of an aggregate before call. */
1640 struct ipa_known_agg_contents_list
1642 /* Offset and size of the described part of the aggregate. */
1643 HOST_WIDE_INT offset
, size
;
1645 /* Type of the described part of the aggregate. */
1648 /* Known constant value or jump function data describing contents. */
1649 struct ipa_load_agg_data value
;
1651 /* Pointer to the next structure in the list. */
1652 struct ipa_known_agg_contents_list
*next
;
1655 /* Add an aggregate content item into a linked list of
1656 ipa_known_agg_contents_list structure, in which all elements
1657 are sorted ascendingly by offset. */
1660 add_to_agg_contents_list (struct ipa_known_agg_contents_list
**plist
,
1661 struct ipa_known_agg_contents_list
*item
)
1663 struct ipa_known_agg_contents_list
*list
= *plist
;
1665 for (; list
; list
= list
->next
)
1667 if (list
->offset
>= item
->offset
)
1670 plist
= &list
->next
;
1677 /* Check whether a given aggregate content is clobbered by certain element in
1678 a linked list of ipa_known_agg_contents_list. */
1681 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list
*list
,
1682 struct ipa_known_agg_contents_list
*item
)
1684 for (; list
; list
= list
->next
)
1686 if (list
->offset
>= item
->offset
)
1687 return list
->offset
< item
->offset
+ item
->size
;
1689 if (list
->offset
+ list
->size
> item
->offset
)
1696 /* Build aggregate jump function from LIST, assuming there are exactly
1697 VALUE_COUNT entries there and that offset of the passed argument
1698 is ARG_OFFSET and store it into JFUNC. */
1701 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list
*list
,
1702 int value_count
, HOST_WIDE_INT arg_offset
,
1703 struct ipa_jump_func
*jfunc
)
1705 vec_safe_reserve (jfunc
->agg
.items
, value_count
, true);
1706 for (; list
; list
= list
->next
)
1708 struct ipa_agg_jf_item item
;
1709 tree operand
= list
->value
.pass_through
.operand
;
1711 if (list
->value
.pass_through
.formal_id
>= 0)
1713 /* Content value is derived from some formal paramerter. */
1714 if (list
->value
.offset
>= 0)
1715 item
.jftype
= IPA_JF_LOAD_AGG
;
1717 item
.jftype
= IPA_JF_PASS_THROUGH
;
1719 item
.value
.load_agg
= list
->value
;
1721 item
.value
.pass_through
.operand
1722 = unshare_expr_without_location (operand
);
1726 /* Content value is known constant. */
1727 item
.jftype
= IPA_JF_CONST
;
1728 item
.value
.constant
= unshare_expr_without_location (operand
);
1733 item
.type
= list
->type
;
1734 gcc_assert (tree_to_shwi (TYPE_SIZE (list
->type
)) == list
->size
);
1736 item
.offset
= list
->offset
- arg_offset
;
1737 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1739 jfunc
->agg
.items
->quick_push (item
);
1743 /* Given an assignment statement STMT, try to collect information into
1744 AGG_VALUE that will be used to construct jump function for RHS of the
1745 assignment, from which content value of an aggregate part comes.
1747 Besides constant and simple pass-through jump functions, also try to
1748 identify whether it matches the following pattern that can be described by
1749 a load-value-from-aggregate jump function, which is a derivative of simple
1750 pass-through jump function.
1756 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1760 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1761 constant, simple pass-through and load-vale-from-aggregate. If value
1762 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1763 set to -1. For simple pass-through and load-value-from-aggregate, field
1764 FORMAL_ID specifies the related formal parameter index, and field
1765 OFFSET can be used to distinguish them, -1 means simple pass-through,
1766 otherwise means load-value-from-aggregate. */
1769 analyze_agg_content_value (struct ipa_func_body_info
*fbi
,
1770 struct ipa_load_agg_data
*agg_value
,
1773 tree lhs
= gimple_assign_lhs (stmt
);
1774 tree rhs1
= gimple_assign_rhs1 (stmt
);
1775 enum tree_code code
;
1778 /* Initialize jump function data for the aggregate part. */
1779 memset (agg_value
, 0, sizeof (*agg_value
));
1780 agg_value
->pass_through
.operation
= NOP_EXPR
;
1781 agg_value
->pass_through
.formal_id
= -1;
1782 agg_value
->offset
= -1;
1784 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs
)) /* TODO: Support aggregate type. */
1785 || TREE_THIS_VOLATILE (lhs
)
1786 || TREE_CODE (lhs
) == BIT_FIELD_REF
1787 || contains_bitfld_component_ref_p (lhs
))
1790 /* Skip SSA copies. */
1791 while (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
1793 if (TREE_CODE (rhs1
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1796 stmt
= SSA_NAME_DEF_STMT (rhs1
);
1797 if (!is_gimple_assign (stmt
))
1800 rhs1
= gimple_assign_rhs1 (stmt
);
1803 if (gphi
*phi
= dyn_cast
<gphi
*> (stmt
))
1805 /* Also special case like the following (a is a formal parameter):
1807 _12 = *a_11(D).dim[0].stride;
1809 # iftmp.22_9 = PHI <_12(2), 1(3)>
1811 parm.6.dim[0].stride = iftmp.22_9;
1813 __x_MOD_foo (&parm.6, b_31(D));
1815 The aggregate function describing parm.6.dim[0].stride is encoded as a
1816 PASS-THROUGH jump function with ASSERT_EXPR operation whith operand 1
1817 (the constant from the PHI node). */
1819 if (gimple_phi_num_args (phi
) != 2)
1821 tree arg0
= gimple_phi_arg_def (phi
, 0);
1822 tree arg1
= gimple_phi_arg_def (phi
, 1);
1825 if (is_gimple_ip_invariant (arg1
))
1830 else if (is_gimple_ip_invariant (arg0
))
1838 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1839 if (!is_gimple_assign (stmt
))
1843 agg_value
->pass_through
.operand
= operand
;
1845 else if (is_gimple_assign (stmt
))
1847 code
= gimple_assign_rhs_code (stmt
);
1848 switch (gimple_assign_rhs_class (stmt
))
1850 case GIMPLE_SINGLE_RHS
:
1851 if (is_gimple_ip_invariant (rhs1
))
1853 agg_value
->pass_through
.operand
= rhs1
;
1859 case GIMPLE_UNARY_RHS
:
1860 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1861 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1862 tcc_binary, this subtleness is somewhat misleading.
1864 Since tcc_unary is widely used in IPA-CP code to check an operation
1865 with one operand, here we only allow tc_unary operation to avoid
1866 possible problem. Then we can use (opclass == tc_unary) or not to
1867 distinguish unary and binary. */
1868 if (TREE_CODE_CLASS (code
) != tcc_unary
|| CONVERT_EXPR_CODE_P (code
))
1871 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &stmt
);
1874 case GIMPLE_BINARY_RHS
:
1876 gimple
*rhs1_stmt
= stmt
;
1877 gimple
*rhs2_stmt
= stmt
;
1878 tree rhs2
= gimple_assign_rhs2 (stmt
);
1880 rhs1
= get_ssa_def_if_simple_copy (rhs1
, &rhs1_stmt
);
1881 rhs2
= get_ssa_def_if_simple_copy (rhs2
, &rhs2_stmt
);
1883 if (is_gimple_ip_invariant (rhs2
))
1885 agg_value
->pass_through
.operand
= rhs2
;
1888 else if (is_gimple_ip_invariant (rhs1
))
1890 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
1891 code
= swap_tree_comparison (code
);
1892 else if (!commutative_tree_code (code
))
1895 agg_value
->pass_through
.operand
= rhs1
;
1902 if (TREE_CODE_CLASS (code
) != tcc_comparison
1903 && !useless_type_conversion_p (TREE_TYPE (lhs
),
1916 if (TREE_CODE (rhs1
) != SSA_NAME
)
1917 index
= load_from_unmodified_param_or_agg (fbi
, fbi
->info
, stmt
,
1919 &agg_value
->by_ref
);
1920 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1
))
1921 index
= ipa_get_param_decl_index (fbi
->info
, SSA_NAME_VAR (rhs1
));
1925 if (agg_value
->offset
>= 0)
1926 agg_value
->type
= TREE_TYPE (rhs1
);
1927 agg_value
->pass_through
.formal_id
= index
;
1928 agg_value
->pass_through
.operation
= code
;
1931 agg_value
->pass_through
.operand
= NULL_TREE
;
1934 /* If STMT is a memory store to the object whose address is BASE, extract
1935 information (offset, size, and value) into CONTENT, and return true,
1936 otherwise we conservatively assume the whole object is modified with
1937 unknown content, and return false. CHECK_REF means that access to object
1938 is expected to be in form of MEM_REF expression. */
1941 extract_mem_content (struct ipa_func_body_info
*fbi
,
1942 gimple
*stmt
, tree base
, bool check_ref
,
1943 struct ipa_known_agg_contents_list
*content
)
1945 HOST_WIDE_INT lhs_offset
, lhs_size
;
1948 if (!is_gimple_assign (stmt
))
1951 tree lhs
= gimple_assign_lhs (stmt
);
1952 tree lhs_base
= get_ref_base_and_extent_hwi (lhs
, &lhs_offset
, &lhs_size
,
1959 if (TREE_CODE (lhs_base
) != MEM_REF
1960 || TREE_OPERAND (lhs_base
, 0) != base
1961 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1964 else if (lhs_base
!= base
)
1967 content
->offset
= lhs_offset
;
1968 content
->size
= lhs_size
;
1969 content
->type
= TREE_TYPE (lhs
);
1970 content
->next
= NULL
;
1972 analyze_agg_content_value (fbi
, &content
->value
, stmt
);
1976 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1977 in ARG is filled in constants or values that are derived from caller's
1978 formal parameter in the way described by some kinds of jump functions. FBI
1979 is the context of the caller function for interprocedural analysis. ARG can
1980 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1981 the type of the aggregate, JFUNC is the jump function for the aggregate. */
1984 determine_known_aggregate_parts (struct ipa_func_body_info
*fbi
,
1985 gcall
*call
, tree arg
,
1987 struct ipa_jump_func
*jfunc
)
1989 struct ipa_known_agg_contents_list
*list
= NULL
, *all_list
= NULL
;
1990 bitmap visited
= NULL
;
1991 int item_count
= 0, value_count
= 0;
1992 HOST_WIDE_INT arg_offset
, arg_size
;
1994 bool check_ref
, by_ref
;
1996 int max_agg_items
= opt_for_fn (fbi
->node
->decl
, param_ipa_max_agg_items
);
1998 if (max_agg_items
== 0)
2001 /* The function operates in three stages. First, we prepare check_ref, r,
2002 arg_base and arg_offset based on what is actually passed as an actual
2005 if (POINTER_TYPE_P (arg_type
))
2008 if (TREE_CODE (arg
) == SSA_NAME
)
2011 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
)))
2012 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
2017 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
2018 arg_size
= tree_to_uhwi (type_size
);
2019 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
2021 else if (TREE_CODE (arg
) == ADDR_EXPR
)
2025 arg
= TREE_OPERAND (arg
, 0);
2026 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2027 &arg_size
, &reverse
);
2030 if (DECL_P (arg_base
))
2033 ao_ref_init (&r
, arg_base
);
2045 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
2049 arg_base
= get_ref_base_and_extent_hwi (arg
, &arg_offset
,
2050 &arg_size
, &reverse
);
2054 ao_ref_init (&r
, arg
);
2057 /* Second stage traverses virtual SSA web backwards starting from the call
2058 statement, only looks at individual dominating virtual operand (its
2059 definition dominates the call), as long as it is confident that content
2060 of the aggregate is affected by definition of the virtual operand, it
2061 builds a sorted linked list of ipa_agg_jf_list describing that. */
2063 for (tree dom_vuse
= gimple_vuse (call
);
2064 dom_vuse
&& fbi
->aa_walk_budget
> 0;)
2066 gimple
*stmt
= SSA_NAME_DEF_STMT (dom_vuse
);
2068 if (gimple_code (stmt
) == GIMPLE_PHI
)
2070 dom_vuse
= get_continuation_for_phi (stmt
, &r
, true,
2071 fbi
->aa_walk_budget
,
2072 &visited
, false, NULL
, NULL
);
2076 fbi
->aa_walk_budget
--;
2077 if (stmt_may_clobber_ref_p_1 (stmt
, &r
))
2079 struct ipa_known_agg_contents_list
*content
2080 = XALLOCA (struct ipa_known_agg_contents_list
);
2082 if (!extract_mem_content (fbi
, stmt
, arg_base
, check_ref
, content
))
2085 /* Now we get a dominating virtual operand, and need to check
2086 whether its value is clobbered any other dominating one. */
2087 if ((content
->value
.pass_through
.formal_id
>= 0
2088 || content
->value
.pass_through
.operand
)
2089 && !clobber_by_agg_contents_list_p (all_list
, content
))
2091 struct ipa_known_agg_contents_list
*copy
2092 = XALLOCA (struct ipa_known_agg_contents_list
);
2094 /* Add to the list consisting of only dominating virtual
2095 operands, whose definitions can finally reach the call. */
2096 add_to_agg_contents_list (&list
, (*copy
= *content
, copy
));
2098 if (++value_count
== max_agg_items
)
2102 /* Add to the list consisting of all dominating virtual operands. */
2103 add_to_agg_contents_list (&all_list
, content
);
2105 if (++item_count
== 2 * max_agg_items
)
2108 dom_vuse
= gimple_vuse (stmt
);
2112 BITMAP_FREE (visited
);
2114 /* Third stage just goes over the list and creates an appropriate vector of
2115 ipa_agg_jf_item structures out of it, of course only if there are
2116 any meaningful items to begin with. */
2120 jfunc
->agg
.by_ref
= by_ref
;
2121 build_agg_jump_func_from_list (list
, value_count
, arg_offset
, jfunc
);
2126 /* Return the Ith param type of callee associated with call graph
2130 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
2133 tree type
= (e
->callee
2134 ? TREE_TYPE (e
->callee
->decl
)
2135 : gimple_call_fntype (e
->call_stmt
));
2136 tree t
= TYPE_ARG_TYPES (type
);
2138 for (n
= 0; n
< i
; n
++)
2145 return TREE_VALUE (t
);
2148 t
= DECL_ARGUMENTS (e
->callee
->decl
);
2149 for (n
= 0; n
< i
; n
++)
2156 return TREE_TYPE (t
);
2160 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2161 allocated structure or a previously existing one shared with other jump
2162 functions and/or transformation summaries. */
2165 ipa_get_ipa_bits_for_value (const widest_int
&value
, const widest_int
&mask
)
2171 ipa_bits
**slot
= ipa_bits_hash_table
->find_slot (&tmp
, INSERT
);
2175 ipa_bits
*res
= ggc_alloc
<ipa_bits
> ();
2183 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2184 table in order to avoid creating multiple same ipa_bits structures. */
2187 ipa_set_jfunc_bits (ipa_jump_func
*jf
, const widest_int
&value
,
2188 const widest_int
&mask
)
2190 jf
->bits
= ipa_get_ipa_bits_for_value (value
, mask
);
2193 /* Return a pointer to a value_range just like *TMP, but either find it in
2194 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2196 static value_range
*
2197 ipa_get_value_range (value_range
*tmp
)
2199 value_range
**slot
= ipa_vr_hash_table
->find_slot (tmp
, INSERT
);
2203 value_range
*vr
= new (ggc_alloc
<value_range
> ()) value_range
;
2210 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2211 equiv set. Use hash table in order to avoid creating multiple same copies of
2214 static value_range
*
2215 ipa_get_value_range (enum value_range_kind kind
, tree min
, tree max
)
2217 value_range
tmp (min
, max
, kind
);
2218 return ipa_get_value_range (&tmp
);
2221 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2222 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2223 same value_range structures. */
2226 ipa_set_jfunc_vr (ipa_jump_func
*jf
, enum value_range_kind type
,
2229 jf
->m_vr
= ipa_get_value_range (type
, min
, max
);
2232 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2233 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2236 ipa_set_jfunc_vr (ipa_jump_func
*jf
, value_range
*tmp
)
2238 jf
->m_vr
= ipa_get_value_range (tmp
);
2241 /* Compute jump function for all arguments of callsite CS and insert the
2242 information in the jump_functions array in the ipa_edge_args corresponding
2243 to this callsite. */
2246 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info
*fbi
,
2247 struct cgraph_edge
*cs
)
2249 ipa_node_params
*info
= ipa_node_params_sum
->get (cs
->caller
);
2250 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (cs
);
2251 gcall
*call
= cs
->call_stmt
;
2252 int n
, arg_num
= gimple_call_num_args (call
);
2253 bool useful_context
= false;
2256 if (arg_num
== 0 || args
->jump_functions
)
2258 vec_safe_grow_cleared (args
->jump_functions
, arg_num
, true);
2259 if (flag_devirtualize
)
2260 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, arg_num
, true);
2262 if (gimple_call_internal_p (call
))
2264 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
2267 for (n
= 0; n
< arg_num
; n
++)
2269 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
2270 tree arg
= gimple_call_arg (call
, n
);
2271 tree param_type
= ipa_get_callee_param_type (cs
, n
);
2272 if (flag_devirtualize
&& POINTER_TYPE_P (TREE_TYPE (arg
)))
2275 class ipa_polymorphic_call_context
context (cs
->caller
->decl
,
2278 context
.get_dynamic_type (instance
, arg
, NULL
, cs
->call_stmt
,
2279 &fbi
->aa_walk_budget
);
2280 *ipa_get_ith_polymorhic_call_context (args
, n
) = context
;
2281 if (!context
.useless_p ())
2282 useful_context
= true;
2285 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2287 bool addr_nonzero
= false;
2288 bool strict_overflow
= false;
2290 if (TREE_CODE (arg
) == SSA_NAME
2292 && get_range_query (cfun
)->range_of_expr (vr
, arg
)
2294 addr_nonzero
= true;
2295 else if (tree_single_nonzero_warnv_p (arg
, &strict_overflow
))
2296 addr_nonzero
= true;
2300 tree z
= build_int_cst (TREE_TYPE (arg
), 0);
2301 ipa_set_jfunc_vr (jfunc
, VR_ANTI_RANGE
, z
, z
);
2304 gcc_assert (!jfunc
->m_vr
);
2308 if (TREE_CODE (arg
) == SSA_NAME
2310 /* Limit the ranger query to integral types as the rest
2311 of this file uses value_range's, which only hold
2312 integers and pointers. */
2313 && irange::supports_p (TREE_TYPE (arg
))
2314 && get_range_query (cfun
)->range_of_expr (vr
, arg
)
2315 && !vr
.undefined_p ())
2318 range_fold_unary_expr (&resvr
, NOP_EXPR
, param_type
,
2319 &vr
, TREE_TYPE (arg
));
2320 if (!resvr
.undefined_p () && !resvr
.varying_p ())
2321 ipa_set_jfunc_vr (jfunc
, &resvr
);
2323 gcc_assert (!jfunc
->m_vr
);
2326 gcc_assert (!jfunc
->m_vr
);
2329 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
2330 && (TREE_CODE (arg
) == SSA_NAME
|| TREE_CODE (arg
) == INTEGER_CST
))
2332 if (TREE_CODE (arg
) == SSA_NAME
)
2333 ipa_set_jfunc_bits (jfunc
, 0,
2334 widest_int::from (get_nonzero_bits (arg
),
2335 TYPE_SIGN (TREE_TYPE (arg
))));
2337 ipa_set_jfunc_bits (jfunc
, wi::to_widest (arg
), 0);
2339 else if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2341 unsigned HOST_WIDE_INT bitpos
;
2344 get_pointer_alignment_1 (arg
, &align
, &bitpos
);
2345 widest_int mask
= wi::bit_and_not
2346 (wi::mask
<widest_int
> (TYPE_PRECISION (TREE_TYPE (arg
)), false),
2347 align
/ BITS_PER_UNIT
- 1);
2348 widest_int value
= bitpos
/ BITS_PER_UNIT
;
2349 ipa_set_jfunc_bits (jfunc
, value
, mask
);
2352 gcc_assert (!jfunc
->bits
);
2354 if (is_gimple_ip_invariant (arg
)
2356 && is_global_var (arg
)
2357 && TREE_READONLY (arg
)))
2358 ipa_set_jf_constant (jfunc
, arg
, cs
);
2359 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
2360 && TREE_CODE (arg
) == PARM_DECL
)
2362 int index
= ipa_get_param_decl_index (info
, arg
);
2364 gcc_assert (index
>=0);
2365 /* Aggregate passed by value, check for pass-through, otherwise we
2366 will attempt to fill in aggregate contents later in this
2368 if (parm_preserved_before_stmt_p (fbi
, index
, call
, arg
))
2370 ipa_set_jf_simple_pass_through (jfunc
, index
, false);
2374 else if (TREE_CODE (arg
) == SSA_NAME
)
2376 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
2378 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
2382 agg_p
= parm_ref_data_pass_through_p (fbi
, index
, call
, arg
);
2383 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
);
2388 gimple
*stmt
= SSA_NAME_DEF_STMT (arg
);
2389 if (is_gimple_assign (stmt
))
2390 compute_complex_assign_jump_func (fbi
, info
, jfunc
,
2391 call
, stmt
, arg
, param_type
);
2392 else if (gimple_code (stmt
) == GIMPLE_PHI
)
2393 compute_complex_ancestor_jump_func (fbi
, info
, jfunc
,
2395 as_a
<gphi
*> (stmt
));
2399 /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2400 passed (because type conversions are ignored in gimple). Usually we can
2401 safely get type from function declaration, but in case of K&R prototypes or
2402 variadic functions we can try our luck with type of the pointer passed.
2403 TODO: Since we look for actual initialization of the memory object, we may better
2404 work out the type based on the memory stores we find. */
2406 param_type
= TREE_TYPE (arg
);
2408 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
2409 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2410 && (jfunc
->type
!= IPA_JF_ANCESTOR
2411 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2412 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
2413 || POINTER_TYPE_P (param_type
)))
2414 determine_known_aggregate_parts (fbi
, call
, arg
, param_type
, jfunc
);
2416 if (!useful_context
)
2417 vec_free (args
->polymorphic_call_contexts
);
2420 /* Compute jump functions for all edges - both direct and indirect - outgoing
2424 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2426 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
2428 struct cgraph_edge
*cs
;
2430 FOR_EACH_VEC_ELT_REVERSE (bi
->cg_edges
, i
, cs
)
2432 struct cgraph_node
*callee
= cs
->callee
;
2436 callee
= callee
->ultimate_alias_target ();
2437 /* We do not need to bother analyzing calls to unknown functions
2438 unless they may become known during lto/whopr. */
2439 if (!callee
->definition
&& !flag_lto
2440 && !gimple_call_fnspec (cs
->call_stmt
).known_p ())
2443 ipa_compute_jump_functions_for_edge (fbi
, cs
);
2447 /* If STMT looks like a statement loading a value from a member pointer formal
2448 parameter, return that parameter and store the offset of the field to
2449 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2450 might be clobbered). If USE_DELTA, then we look for a use of the delta
2451 field rather than the pfn. */
2454 ipa_get_stmt_member_ptr_load_param (gimple
*stmt
, bool use_delta
,
2455 HOST_WIDE_INT
*offset_p
)
2457 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
2459 if (!gimple_assign_single_p (stmt
))
2462 rhs
= gimple_assign_rhs1 (stmt
);
2463 if (TREE_CODE (rhs
) == COMPONENT_REF
)
2465 ref_field
= TREE_OPERAND (rhs
, 1);
2466 rhs
= TREE_OPERAND (rhs
, 0);
2469 ref_field
= NULL_TREE
;
2470 if (TREE_CODE (rhs
) != MEM_REF
)
2472 rec
= TREE_OPERAND (rhs
, 0);
2473 if (TREE_CODE (rec
) != ADDR_EXPR
)
2475 rec
= TREE_OPERAND (rec
, 0);
2476 if (TREE_CODE (rec
) != PARM_DECL
2477 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
2479 ref_offset
= TREE_OPERAND (rhs
, 1);
2486 *offset_p
= int_bit_position (fld
);
2490 if (integer_nonzerop (ref_offset
))
2492 return ref_field
== fld
? rec
: NULL_TREE
;
2495 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
2499 /* Returns true iff T is an SSA_NAME defined by a statement. */
2502 ipa_is_ssa_with_stmt_def (tree t
)
2504 if (TREE_CODE (t
) == SSA_NAME
2505 && !SSA_NAME_IS_DEFAULT_DEF (t
))
2511 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2512 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2513 indirect call graph edge.
2514 If POLYMORPHIC is true record is as a destination of polymorphic call. */
2516 static struct cgraph_edge
*
2517 ipa_note_param_call (struct cgraph_node
*node
, int param_index
,
2518 gcall
*stmt
, bool polymorphic
)
2520 struct cgraph_edge
*cs
;
2522 cs
= node
->get_edge (stmt
);
2523 cs
->indirect_info
->param_index
= param_index
;
2524 cs
->indirect_info
->agg_contents
= 0;
2525 cs
->indirect_info
->member_ptr
= 0;
2526 cs
->indirect_info
->guaranteed_unmodified
= 0;
2527 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
2528 ipa_set_param_used_by_indirect_call (info
, param_index
, true);
2529 if (cs
->indirect_info
->polymorphic
|| polymorphic
)
2530 ipa_set_param_used_by_polymorphic_call (info
, param_index
, true);
2534 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2535 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2536 intermediate information about each formal parameter. Currently it checks
2537 whether the call calls a pointer that is a formal parameter and if so, the
2538 parameter is marked with the called flag and an indirect call graph edge
2539 describing the call is created. This is very simple for ordinary pointers
2540 represented in SSA but not-so-nice when it comes to member pointers. The
2541 ugly part of this function does nothing more than trying to match the
2542 pattern of such a call. An example of such a pattern is the gimple dump
2543 below, the call is on the last line:
2546 f$__delta_5 = f.__delta;
2547 f$__pfn_24 = f.__pfn;
2551 f$__delta_5 = MEM[(struct *)&f];
2552 f$__pfn_24 = MEM[(struct *)&f + 4B];
2554 and a few lines below:
2557 D.2496_3 = (int) f$__pfn_24;
2558 D.2497_4 = D.2496_3 & 1;
2565 D.2500_7 = (unsigned int) f$__delta_5;
2566 D.2501_8 = &S + D.2500_7;
2567 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2568 D.2503_10 = *D.2502_9;
2569 D.2504_12 = f$__pfn_24 + -1;
2570 D.2505_13 = (unsigned int) D.2504_12;
2571 D.2506_14 = D.2503_10 + D.2505_13;
2572 D.2507_15 = *D.2506_14;
2573 iftmp.11_16 = (String:: *) D.2507_15;
2576 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2577 D.2500_19 = (unsigned int) f$__delta_5;
2578 D.2508_20 = &S + D.2500_19;
2579 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2581 Such patterns are results of simple calls to a member pointer:
2583 int doprinting (int (MyString::* f)(int) const)
2585 MyString S ("somestring");
2590 Moreover, the function also looks for called pointers loaded from aggregates
2591 passed by value or reference. */
2594 ipa_analyze_indirect_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
,
2597 class ipa_node_params
*info
= fbi
->info
;
2598 HOST_WIDE_INT offset
;
2601 if (SSA_NAME_IS_DEFAULT_DEF (target
))
2603 tree var
= SSA_NAME_VAR (target
);
2604 int index
= ipa_get_param_decl_index (info
, var
);
2606 ipa_note_param_call (fbi
->node
, index
, call
, false);
2611 gimple
*def
= SSA_NAME_DEF_STMT (target
);
2612 bool guaranteed_unmodified
;
2613 if (gimple_assign_single_p (def
)
2614 && ipa_load_from_parm_agg (fbi
, info
->descriptors
, def
,
2615 gimple_assign_rhs1 (def
), &index
, &offset
,
2616 NULL
, &by_ref
, &guaranteed_unmodified
))
2618 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2620 cs
->indirect_info
->offset
= offset
;
2621 cs
->indirect_info
->agg_contents
= 1;
2622 cs
->indirect_info
->by_ref
= by_ref
;
2623 cs
->indirect_info
->guaranteed_unmodified
= guaranteed_unmodified
;
2627 /* Now we need to try to match the complex pattern of calling a member
2629 if (gimple_code (def
) != GIMPLE_PHI
2630 || gimple_phi_num_args (def
) != 2
2631 || !POINTER_TYPE_P (TREE_TYPE (target
))
2632 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
2635 /* First, we need to check whether one of these is a load from a member
2636 pointer that is a parameter to this function. */
2637 tree n1
= PHI_ARG_DEF (def
, 0);
2638 tree n2
= PHI_ARG_DEF (def
, 1);
2639 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
2641 gimple
*d1
= SSA_NAME_DEF_STMT (n1
);
2642 gimple
*d2
= SSA_NAME_DEF_STMT (n2
);
2645 basic_block bb
, virt_bb
;
2646 basic_block join
= gimple_bb (def
);
2647 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
2649 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
2652 bb
= EDGE_PRED (join
, 0)->src
;
2653 virt_bb
= gimple_bb (d2
);
2655 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
2657 bb
= EDGE_PRED (join
, 1)->src
;
2658 virt_bb
= gimple_bb (d1
);
2663 /* Second, we need to check that the basic blocks are laid out in the way
2664 corresponding to the pattern. */
2666 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
2667 || single_pred (virt_bb
) != bb
2668 || single_succ (virt_bb
) != join
)
2671 /* Third, let's see that the branching is done depending on the least
2672 significant bit of the pfn. */
2674 gimple
*branch
= last_stmt (bb
);
2675 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
2678 if ((gimple_cond_code (branch
) != NE_EXPR
2679 && gimple_cond_code (branch
) != EQ_EXPR
)
2680 || !integer_zerop (gimple_cond_rhs (branch
)))
2683 tree cond
= gimple_cond_lhs (branch
);
2684 if (!ipa_is_ssa_with_stmt_def (cond
))
2687 def
= SSA_NAME_DEF_STMT (cond
);
2688 if (!is_gimple_assign (def
)
2689 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
2690 || !integer_onep (gimple_assign_rhs2 (def
)))
2693 cond
= gimple_assign_rhs1 (def
);
2694 if (!ipa_is_ssa_with_stmt_def (cond
))
2697 def
= SSA_NAME_DEF_STMT (cond
);
2699 if (is_gimple_assign (def
)
2700 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
2702 cond
= gimple_assign_rhs1 (def
);
2703 if (!ipa_is_ssa_with_stmt_def (cond
))
2705 def
= SSA_NAME_DEF_STMT (cond
);
2709 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
2710 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2711 == ptrmemfunc_vbit_in_delta
),
2716 index
= ipa_get_param_decl_index (info
, rec
);
2718 && parm_preserved_before_stmt_p (fbi
, index
, call
, rec
))
2720 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2722 cs
->indirect_info
->offset
= offset
;
2723 cs
->indirect_info
->agg_contents
= 1;
2724 cs
->indirect_info
->member_ptr
= 1;
2725 cs
->indirect_info
->guaranteed_unmodified
= 1;
2731 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2732 object referenced in the expression is a formal parameter of the caller
2733 FBI->node (described by FBI->info), create a call note for the
2737 ipa_analyze_virtual_call_uses (struct ipa_func_body_info
*fbi
,
2738 gcall
*call
, tree target
)
2740 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2742 HOST_WIDE_INT anc_offset
;
2744 if (!flag_devirtualize
)
2747 if (TREE_CODE (obj
) != SSA_NAME
)
2750 class ipa_node_params
*info
= fbi
->info
;
2751 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2753 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2757 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2758 gcc_assert (index
>= 0);
2759 if (detect_type_change_ssa (fbi
, obj
, obj_type_ref_class (target
),
2765 gimple
*stmt
= SSA_NAME_DEF_STMT (obj
);
2768 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2771 index
= ipa_get_param_decl_index (info
,
2772 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2773 gcc_assert (index
>= 0);
2774 if (detect_type_change (fbi
, obj
, expr
, obj_type_ref_class (target
),
2779 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
,
2781 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2782 ii
->offset
= anc_offset
;
2783 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2784 ii
->otr_type
= obj_type_ref_class (target
);
2785 ii
->polymorphic
= 1;
2788 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2789 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2790 containing intermediate information about each formal parameter. */
2793 ipa_analyze_call_uses (struct ipa_func_body_info
*fbi
, gcall
*call
)
2795 tree target
= gimple_call_fn (call
);
2798 || (TREE_CODE (target
) != SSA_NAME
2799 && !virtual_method_call_p (target
)))
2802 struct cgraph_edge
*cs
= fbi
->node
->get_edge (call
);
2803 /* If we previously turned the call into a direct call, there is
2804 no need to analyze. */
2805 if (cs
&& !cs
->indirect_unknown_callee
)
2808 if (cs
->indirect_info
->polymorphic
&& flag_devirtualize
)
2811 tree target
= gimple_call_fn (call
);
2812 ipa_polymorphic_call_context
context (current_function_decl
,
2813 target
, call
, &instance
);
2815 gcc_checking_assert (cs
->indirect_info
->otr_type
2816 == obj_type_ref_class (target
));
2817 gcc_checking_assert (cs
->indirect_info
->otr_token
2818 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target
)));
2820 cs
->indirect_info
->vptr_changed
2821 = !context
.get_dynamic_type (instance
,
2822 OBJ_TYPE_REF_OBJECT (target
),
2823 obj_type_ref_class (target
), call
,
2824 &fbi
->aa_walk_budget
);
2825 cs
->indirect_info
->context
= context
;
2828 if (TREE_CODE (target
) == SSA_NAME
)
2829 ipa_analyze_indirect_call_uses (fbi
, call
, target
);
2830 else if (virtual_method_call_p (target
))
2831 ipa_analyze_virtual_call_uses (fbi
, call
, target
);
2835 /* Analyze the call statement STMT with respect to formal parameters (described
2836 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2837 formal parameters are called. */
2840 ipa_analyze_stmt_uses (struct ipa_func_body_info
*fbi
, gimple
*stmt
)
2842 if (is_gimple_call (stmt
))
2843 ipa_analyze_call_uses (fbi
, as_a
<gcall
*> (stmt
));
2846 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2847 If OP is a parameter declaration, mark it as used in the info structure
2851 visit_ref_for_mod_analysis (gimple
*, tree op
, tree
, void *data
)
2853 class ipa_node_params
*info
= (class ipa_node_params
*) data
;
2855 op
= get_base_address (op
);
2857 && TREE_CODE (op
) == PARM_DECL
)
2859 int index
= ipa_get_param_decl_index (info
, op
);
2860 gcc_assert (index
>= 0);
2861 ipa_set_param_used (info
, index
, true);
2867 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2868 the findings in various structures of the associated ipa_node_params
2869 structure, such as parameter flags, notes etc. FBI holds various data about
2870 the function being analyzed. */
2873 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info
*fbi
, basic_block bb
)
2875 gimple_stmt_iterator gsi
;
2876 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2878 gimple
*stmt
= gsi_stmt (gsi
);
2880 if (is_gimple_debug (stmt
))
2883 ipa_analyze_stmt_uses (fbi
, stmt
);
2884 walk_stmt_load_store_addr_ops (stmt
, fbi
->info
,
2885 visit_ref_for_mod_analysis
,
2886 visit_ref_for_mod_analysis
,
2887 visit_ref_for_mod_analysis
);
2889 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2890 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), fbi
->info
,
2891 visit_ref_for_mod_analysis
,
2892 visit_ref_for_mod_analysis
,
2893 visit_ref_for_mod_analysis
);
2896 /* Return true EXPR is a load from a dereference of SSA_NAME NAME. */
2899 load_from_dereferenced_name (tree expr
, tree name
)
2901 tree base
= get_base_address (expr
);
2902 return (TREE_CODE (base
) == MEM_REF
2903 && TREE_OPERAND (base
, 0) == name
);
2906 /* Calculate controlled uses of parameters of NODE. */
2909 ipa_analyze_controlled_uses (struct cgraph_node
*node
)
2911 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
2913 for (int i
= 0; i
< ipa_get_param_count (info
); i
++)
2915 tree parm
= ipa_get_param (info
, i
);
2917 bool load_dereferenced
= false;
2919 /* For SSA regs see if parameter is used. For non-SSA we compute
2920 the flag during modification analysis. */
2921 if (is_gimple_reg (parm
))
2923 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2925 if (ddef
&& !has_zero_uses (ddef
))
2927 imm_use_iterator imm_iter
;
2930 ipa_set_param_used (info
, i
, true);
2931 FOR_EACH_IMM_USE_STMT (stmt
, imm_iter
, ddef
)
2933 if (is_gimple_debug (stmt
))
2936 int all_stmt_uses
= 0;
2937 use_operand_p use_p
;
2938 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
2941 if (is_gimple_call (stmt
))
2943 if (gimple_call_internal_p (stmt
))
2945 call_uses
= IPA_UNDESCRIBED_USE
;
2948 int recognized_stmt_uses
;
2949 if (gimple_call_fn (stmt
) == ddef
)
2950 recognized_stmt_uses
= 1;
2952 recognized_stmt_uses
= 0;
2953 unsigned arg_count
= gimple_call_num_args (stmt
);
2954 for (unsigned i
= 0; i
< arg_count
; i
++)
2956 tree arg
= gimple_call_arg (stmt
, i
);
2958 recognized_stmt_uses
++;
2959 else if (load_from_dereferenced_name (arg
, ddef
))
2961 load_dereferenced
= true;
2962 recognized_stmt_uses
++;
2966 if (recognized_stmt_uses
!= all_stmt_uses
)
2968 call_uses
= IPA_UNDESCRIBED_USE
;
2972 call_uses
+= all_stmt_uses
;
2974 else if (gimple_assign_single_p (stmt
))
2976 tree rhs
= gimple_assign_rhs1 (stmt
);
2977 if (all_stmt_uses
!= 1
2978 || !load_from_dereferenced_name (rhs
, ddef
))
2980 call_uses
= IPA_UNDESCRIBED_USE
;
2983 load_dereferenced
= true;
2987 call_uses
= IPA_UNDESCRIBED_USE
;
2996 call_uses
= IPA_UNDESCRIBED_USE
;
2997 ipa_set_controlled_uses (info
, i
, call_uses
);
2998 ipa_set_param_load_dereferenced (info
, i
, load_dereferenced
);
3002 /* Free stuff in BI. */
3005 free_ipa_bb_info (struct ipa_bb_info
*bi
)
3007 bi
->cg_edges
.release ();
3008 bi
->param_aa_statuses
.release ();
3011 /* Dominator walker driving the analysis. */
3013 class analysis_dom_walker
: public dom_walker
3016 analysis_dom_walker (struct ipa_func_body_info
*fbi
)
3017 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
) {}
3019 edge
before_dom_children (basic_block
) final override
;
3022 struct ipa_func_body_info
*m_fbi
;
3026 analysis_dom_walker::before_dom_children (basic_block bb
)
3028 ipa_analyze_params_uses_in_bb (m_fbi
, bb
);
3029 ipa_compute_jump_functions_for_bb (m_fbi
, bb
);
3033 /* Release body info FBI. */
3036 ipa_release_body_info (struct ipa_func_body_info
*fbi
)
3039 struct ipa_bb_info
*bi
;
3041 FOR_EACH_VEC_ELT (fbi
->bb_infos
, i
, bi
)
3042 free_ipa_bb_info (bi
);
3043 fbi
->bb_infos
.release ();
3046 /* Initialize the array describing properties of formal parameters
3047 of NODE, analyze their uses and compute jump functions associated
3048 with actual arguments of calls from within NODE. */
3051 ipa_analyze_node (struct cgraph_node
*node
)
3053 struct ipa_func_body_info fbi
;
3054 class ipa_node_params
*info
;
3056 ipa_check_create_node_params ();
3057 ipa_check_create_edge_args ();
3058 info
= ipa_node_params_sum
->get_create (node
);
3060 if (info
->analysis_done
)
3062 info
->analysis_done
= 1;
3064 if (ipa_func_spec_opts_forbid_analysis_p (node
)
3065 || (count_formal_params (node
->decl
)
3066 >= (1 << IPA_PROP_ARG_INDEX_LIMIT_BITS
)))
3068 gcc_assert (!ipa_get_param_count (info
));
3072 struct function
*func
= DECL_STRUCT_FUNCTION (node
->decl
);
3074 calculate_dominance_info (CDI_DOMINATORS
);
3075 ipa_initialize_node_params (node
);
3076 ipa_analyze_controlled_uses (node
);
3080 fbi
.bb_infos
= vNULL
;
3081 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
3082 fbi
.param_count
= ipa_get_param_count (info
);
3083 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
3085 for (struct cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
3087 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3088 bi
->cg_edges
.safe_push (cs
);
3091 for (struct cgraph_edge
*cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
3093 ipa_bb_info
*bi
= ipa_get_bb_info (&fbi
, gimple_bb (cs
->call_stmt
));
3094 bi
->cg_edges
.safe_push (cs
);
3097 analysis_dom_walker (&fbi
).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
3099 ipa_release_body_info (&fbi
);
3100 free_dominance_info (CDI_DOMINATORS
);
3104 /* Update the jump functions associated with call graph edge E when the call
3105 graph edge CS is being inlined, assuming that E->caller is already (possibly
3106 indirectly) inlined into CS->callee and that E has not been inlined. */
3109 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
3110 struct cgraph_edge
*e
)
3112 ipa_edge_args
*top
= ipa_edge_args_sum
->get (cs
);
3113 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
3116 int count
= ipa_get_cs_argument_count (args
);
3119 for (i
= 0; i
< count
; i
++)
3121 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
3122 class ipa_polymorphic_call_context
*dst_ctx
3123 = ipa_get_ith_polymorhic_call_context (args
, i
);
3127 struct ipa_agg_jf_item
*item
;
3130 FOR_EACH_VEC_ELT (*dst
->agg
.items
, j
, item
)
3133 struct ipa_jump_func
*src
;
3135 if (item
->jftype
!= IPA_JF_PASS_THROUGH
3136 && item
->jftype
!= IPA_JF_LOAD_AGG
)
3139 dst_fid
= item
->value
.pass_through
.formal_id
;
3140 if (!top
|| dst_fid
>= ipa_get_cs_argument_count (top
))
3142 item
->jftype
= IPA_JF_UNKNOWN
;
3146 item
->value
.pass_through
.formal_id
= -1;
3147 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3148 if (src
->type
== IPA_JF_CONST
)
3150 if (item
->jftype
== IPA_JF_PASS_THROUGH
3151 && item
->value
.pass_through
.operation
== NOP_EXPR
)
3153 item
->jftype
= IPA_JF_CONST
;
3154 item
->value
.constant
= src
->value
.constant
.value
;
3158 else if (src
->type
== IPA_JF_PASS_THROUGH
3159 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3161 if (item
->jftype
== IPA_JF_PASS_THROUGH
3162 || !item
->value
.load_agg
.by_ref
3163 || src
->value
.pass_through
.agg_preserved
)
3164 item
->value
.pass_through
.formal_id
3165 = src
->value
.pass_through
.formal_id
;
3167 else if (src
->type
== IPA_JF_ANCESTOR
)
3169 if (item
->jftype
== IPA_JF_PASS_THROUGH
)
3171 if (!src
->value
.ancestor
.offset
)
3172 item
->value
.pass_through
.formal_id
3173 = src
->value
.ancestor
.formal_id
;
3175 else if (src
->value
.ancestor
.agg_preserved
)
3177 gcc_checking_assert (item
->value
.load_agg
.by_ref
);
3179 item
->value
.pass_through
.formal_id
3180 = src
->value
.ancestor
.formal_id
;
3181 item
->value
.load_agg
.offset
3182 += src
->value
.ancestor
.offset
;
3186 if (item
->value
.pass_through
.formal_id
< 0)
3187 item
->jftype
= IPA_JF_UNKNOWN
;
3193 ipa_set_jf_unknown (dst
);
3197 if (dst
->type
== IPA_JF_ANCESTOR
)
3199 struct ipa_jump_func
*src
;
3200 int dst_fid
= dst
->value
.ancestor
.formal_id
;
3201 class ipa_polymorphic_call_context
*src_ctx
3202 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3204 /* Variable number of arguments can cause havoc if we try to access
3205 one that does not exist in the inlined edge. So make sure we
3207 if (dst_fid
>= ipa_get_cs_argument_count (top
))
3209 ipa_set_jf_unknown (dst
);
3213 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3215 if (src_ctx
&& !src_ctx
->useless_p ())
3217 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3219 /* TODO: Make type preserved safe WRT contexts. */
3220 if (!ipa_get_jf_ancestor_type_preserved (dst
))
3221 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3222 ctx
.offset_by (dst
->value
.ancestor
.offset
);
3223 if (!ctx
.useless_p ())
3227 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3229 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3232 dst_ctx
->combine_with (ctx
);
3236 /* Parameter and argument in ancestor jump function must be pointer
3237 type, which means access to aggregate must be by-reference. */
3238 gcc_assert (!src
->agg
.items
|| src
->agg
.by_ref
);
3240 if (src
->agg
.items
&& dst
->value
.ancestor
.agg_preserved
)
3242 struct ipa_agg_jf_item
*item
;
3245 /* Currently we do not produce clobber aggregate jump functions,
3246 replace with merging when we do. */
3247 gcc_assert (!dst
->agg
.items
);
3249 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3250 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3251 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
3252 item
->offset
-= dst
->value
.ancestor
.offset
;
3255 if (src
->type
== IPA_JF_PASS_THROUGH
3256 && src
->value
.pass_through
.operation
== NOP_EXPR
)
3258 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
3259 dst
->value
.ancestor
.agg_preserved
&=
3260 src
->value
.pass_through
.agg_preserved
;
3262 else if (src
->type
== IPA_JF_ANCESTOR
)
3264 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
3265 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
3266 dst
->value
.ancestor
.agg_preserved
&=
3267 src
->value
.ancestor
.agg_preserved
;
3268 dst
->value
.ancestor
.keep_null
|= src
->value
.ancestor
.keep_null
;
3271 ipa_set_jf_unknown (dst
);
3273 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
3275 struct ipa_jump_func
*src
;
3276 /* We must check range due to calls with variable number of arguments
3277 and we cannot combine jump functions with operations. */
3278 if (dst
->value
.pass_through
.operation
== NOP_EXPR
3279 && (top
&& dst
->value
.pass_through
.formal_id
3280 < ipa_get_cs_argument_count (top
)))
3282 int dst_fid
= dst
->value
.pass_through
.formal_id
;
3283 src
= ipa_get_ith_jump_func (top
, dst_fid
);
3284 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
3285 class ipa_polymorphic_call_context
*src_ctx
3286 = ipa_get_ith_polymorhic_call_context (top
, dst_fid
);
3288 if (src_ctx
&& !src_ctx
->useless_p ())
3290 class ipa_polymorphic_call_context ctx
= *src_ctx
;
3292 /* TODO: Make type preserved safe WRT contexts. */
3293 if (!ipa_get_jf_pass_through_type_preserved (dst
))
3294 ctx
.possible_dynamic_type_change (e
->in_polymorphic_cdtor
);
3295 if (!ctx
.useless_p ())
3299 vec_safe_grow_cleared (args
->polymorphic_call_contexts
,
3301 dst_ctx
= ipa_get_ith_polymorhic_call_context (args
, i
);
3303 dst_ctx
->combine_with (ctx
);
3308 case IPA_JF_UNKNOWN
:
3309 ipa_set_jf_unknown (dst
);
3312 ipa_set_jf_cst_copy (dst
, src
);
3315 case IPA_JF_PASS_THROUGH
:
3317 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
3318 enum tree_code operation
;
3319 operation
= ipa_get_jf_pass_through_operation (src
);
3321 if (operation
== NOP_EXPR
)
3325 && ipa_get_jf_pass_through_agg_preserved (src
);
3326 ipa_set_jf_simple_pass_through (dst
, formal_id
, agg_p
);
3328 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
3329 ipa_set_jf_unary_pass_through (dst
, formal_id
, operation
);
3332 tree operand
= ipa_get_jf_pass_through_operand (src
);
3333 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
3338 case IPA_JF_ANCESTOR
:
3342 && ipa_get_jf_ancestor_agg_preserved (src
);
3343 ipa_set_ancestor_jf (dst
,
3344 ipa_get_jf_ancestor_offset (src
),
3345 ipa_get_jf_ancestor_formal_id (src
),
3347 ipa_get_jf_ancestor_keep_null (src
));
3355 && (dst_agg_p
|| !src
->agg
.by_ref
))
3357 /* Currently we do not produce clobber aggregate jump
3358 functions, replace with merging when we do. */
3359 gcc_assert (!dst
->agg
.items
);
3361 dst
->agg
.by_ref
= src
->agg
.by_ref
;
3362 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
3366 ipa_set_jf_unknown (dst
);
3371 /* If TARGET is an addr_expr of a function declaration, make it the
3372 (SPECULATIVE)destination of an indirect edge IE and return the edge.
3373 Otherwise, return NULL. */
3375 struct cgraph_edge
*
3376 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
,
3379 struct cgraph_node
*callee
;
3380 bool unreachable
= false;
3382 if (TREE_CODE (target
) == ADDR_EXPR
)
3383 target
= TREE_OPERAND (target
, 0);
3384 if (TREE_CODE (target
) != FUNCTION_DECL
)
3386 target
= canonicalize_constructor_val (target
, NULL
);
3387 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
3389 /* Member pointer call that goes through a VMT lookup. */
3390 if (ie
->indirect_info
->member_ptr
3391 /* Or if target is not an invariant expression and we do not
3392 know if it will evaulate to function at runtime.
3393 This can happen when folding through &VAR, where &VAR
3394 is IP invariant, but VAR itself is not.
3396 TODO: Revisit this when GCC 5 is branched. It seems that
3397 member_ptr check is not needed and that we may try to fold
3398 the expression and see if VAR is readonly. */
3399 || !is_gimple_ip_invariant (target
))
3401 if (dump_enabled_p ())
3403 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3404 "discovered direct call non-invariant %s\n",
3405 ie
->caller
->dump_name ());
3411 if (dump_enabled_p ())
3413 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3414 "discovered direct call to non-function in %s, "
3415 "making it __builtin_unreachable\n",
3416 ie
->caller
->dump_name ());
3419 target
= builtin_decl_unreachable ();
3420 callee
= cgraph_node::get_create (target
);
3424 callee
= cgraph_node::get (target
);
3427 callee
= cgraph_node::get (target
);
3429 /* Because may-edges are not explicitely represented and vtable may be external,
3430 we may create the first reference to the object in the unit. */
3431 if (!callee
|| callee
->inlined_to
)
3434 /* We are better to ensure we can refer to it.
3435 In the case of static functions we are out of luck, since we already
3436 removed its body. In the case of public functions we may or may
3437 not introduce the reference. */
3438 if (!canonicalize_constructor_val (target
, NULL
)
3439 || !TREE_PUBLIC (target
))
3442 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
3443 "(%s -> %s) but cannot refer to it. Giving up.\n",
3444 ie
->caller
->dump_name (),
3445 ie
->callee
->dump_name ());
3448 callee
= cgraph_node::get_create (target
);
3451 /* If the edge is already speculated. */
3452 if (speculative
&& ie
->speculative
)
3456 cgraph_edge
*e2
= ie
->speculative_call_for_target (callee
);
3460 fprintf (dump_file
, "ipa-prop: Discovered call to a "
3461 "speculative target (%s -> %s) but the call is "
3462 "already speculated to different target. "
3464 ie
->caller
->dump_name (), callee
->dump_name ());
3470 "ipa-prop: Discovered call to a speculative target "
3471 "(%s -> %s) this agree with previous speculation.\n",
3472 ie
->caller
->dump_name (), callee
->dump_name ());
3478 if (!dbg_cnt (devirt
))
3481 ipa_check_create_node_params ();
3483 /* We cannot make edges to inline clones. It is bug that someone removed
3484 the cgraph node too early. */
3485 gcc_assert (!callee
->inlined_to
);
3487 if (dump_file
&& !unreachable
)
3489 fprintf (dump_file
, "ipa-prop: Discovered %s call to a %s target "
3490 "(%s -> %s), for stmt ",
3491 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
3492 speculative
? "speculative" : "known",
3493 ie
->caller
->dump_name (),
3494 callee
->dump_name ());
3496 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
3498 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
3500 if (dump_enabled_p ())
3502 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, ie
->call_stmt
,
3503 "converting indirect call in %s to direct call to %s\n",
3504 ie
->caller
->dump_name (), callee
->dump_name ());
3508 struct cgraph_edge
*orig
= ie
;
3509 ie
= cgraph_edge::make_direct (ie
, callee
);
3510 /* If we resolved speculative edge the cost is already up to date
3511 for direct call (adjusted by inline_edge_duplication_hook). */
3514 ipa_call_summary
*es
= ipa_call_summaries
->get (ie
);
3515 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
3516 - eni_size_weights
.call_cost
);
3517 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
3518 - eni_time_weights
.call_cost
);
3523 if (!callee
->can_be_discarded_p ())
3526 alias
= dyn_cast
<cgraph_node
*> (callee
->noninterposable_alias ());
3530 /* make_speculative will update ie's cost to direct call cost. */
3531 ie
= ie
->make_speculative
3532 (callee
, ie
->count
.apply_scale (8, 10));
3538 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3539 CONSTRUCTOR and return it. Return NULL if the search fails for some
3543 find_constructor_constant_at_offset (tree constructor
, HOST_WIDE_INT req_offset
)
3545 tree type
= TREE_TYPE (constructor
);
3546 if (TREE_CODE (type
) != ARRAY_TYPE
3547 && TREE_CODE (type
) != RECORD_TYPE
)
3552 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor
), ix
, index
, val
)
3554 HOST_WIDE_INT elt_offset
;
3555 if (TREE_CODE (type
) == ARRAY_TYPE
)
3558 tree unit_size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
3559 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3563 if (TREE_CODE (index
) == RANGE_EXPR
)
3564 off
= wi::to_offset (TREE_OPERAND (index
, 0));
3566 off
= wi::to_offset (index
);
3567 if (TYPE_DOMAIN (type
) && TYPE_MIN_VALUE (TYPE_DOMAIN (type
)))
3569 tree low_bound
= TYPE_MIN_VALUE (TYPE_DOMAIN (type
));
3570 gcc_assert (TREE_CODE (unit_size
) == INTEGER_CST
);
3571 off
= wi::sext (off
- wi::to_offset (low_bound
),
3572 TYPE_PRECISION (TREE_TYPE (index
)));
3574 off
*= wi::to_offset (unit_size
);
3575 /* ??? Handle more than just the first index of a
3579 off
= wi::to_offset (unit_size
) * ix
;
3581 off
= wi::lshift (off
, LOG2_BITS_PER_UNIT
);
3582 if (!wi::fits_shwi_p (off
) || wi::neg_p (off
))
3584 elt_offset
= off
.to_shwi ();
3586 else if (TREE_CODE (type
) == RECORD_TYPE
)
3588 gcc_checking_assert (index
&& TREE_CODE (index
) == FIELD_DECL
);
3589 if (DECL_BIT_FIELD (index
))
3591 elt_offset
= int_bit_position (index
);
3596 if (elt_offset
> req_offset
)
3599 if (TREE_CODE (val
) == CONSTRUCTOR
)
3600 return find_constructor_constant_at_offset (val
,
3601 req_offset
- elt_offset
);
3603 if (elt_offset
== req_offset
3604 && is_gimple_reg_type (TREE_TYPE (val
))
3605 && is_gimple_ip_invariant (val
))
3611 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3612 invariant from a static constructor and if so, return it. Otherwise return
3616 ipa_find_agg_cst_from_init (tree scalar
, HOST_WIDE_INT offset
, bool by_ref
)
3620 if (TREE_CODE (scalar
) != ADDR_EXPR
)
3622 scalar
= TREE_OPERAND (scalar
, 0);
3626 || !is_global_var (scalar
)
3627 || !TREE_READONLY (scalar
)
3628 || !DECL_INITIAL (scalar
)
3629 || TREE_CODE (DECL_INITIAL (scalar
)) != CONSTRUCTOR
)
3632 return find_constructor_constant_at_offset (DECL_INITIAL (scalar
), offset
);
3635 /* Retrieve value from AGG_JFUNC for the given OFFSET or return NULL if there
3636 is none. BY_REF specifies whether the value has to be passed by reference
3640 ipa_find_agg_cst_from_jfunc_items (struct ipa_agg_jump_function
*agg_jfunc
,
3641 ipa_node_params
*src_info
,
3642 cgraph_node
*src_node
,
3643 HOST_WIDE_INT offset
, bool by_ref
)
3645 if (by_ref
!= agg_jfunc
->by_ref
)
3648 for (const ipa_agg_jf_item
&item
: agg_jfunc
->items
)
3649 if (item
.offset
== offset
)
3650 return ipa_agg_value_from_jfunc (src_info
, src_node
, &item
);
3655 /* Remove a reference to SYMBOL from the list of references of a node given by
3656 reference description RDESC. Return true if the reference has been
3657 successfully found and removed. */
3660 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
3662 struct ipa_ref
*to_del
;
3663 struct cgraph_edge
*origin
;
3668 to_del
= origin
->caller
->find_reference (symbol
, origin
->call_stmt
,
3669 origin
->lto_stmt_uid
);
3673 to_del
->remove_reference ();
3675 fprintf (dump_file
, "ipa-prop: Removed a reference from %s to %s.\n",
3676 origin
->caller
->dump_name (), symbol
->dump_name ());
3680 /* If JFUNC has a reference description with refcount different from
3681 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3682 NULL. JFUNC must be a constant jump function. */
3684 static struct ipa_cst_ref_desc
*
3685 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
3687 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
3688 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
3694 /* If the value of constant jump function JFUNC is an address of a function
3695 declaration, return the associated call graph node. Otherwise return
3698 static symtab_node
*
3699 symtab_node_for_jfunc (struct ipa_jump_func
*jfunc
)
3701 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
3702 tree cst
= ipa_get_jf_constant (jfunc
);
3703 if (TREE_CODE (cst
) != ADDR_EXPR
3704 || (TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
3705 && TREE_CODE (TREE_OPERAND (cst
, 0)) != VAR_DECL
))
3708 return symtab_node::get (TREE_OPERAND (cst
, 0));
3712 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3713 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3714 the edge specified in the rdesc. Return false if either the symbol or the
3715 reference could not be found, otherwise return true. */
3718 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
3720 struct ipa_cst_ref_desc
*rdesc
;
3721 if (jfunc
->type
== IPA_JF_CONST
3722 && (rdesc
= jfunc_rdesc_usable (jfunc
))
3723 && --rdesc
->refcount
== 0)
3725 symtab_node
*symbol
= symtab_node_for_jfunc (jfunc
);
3729 return remove_described_reference (symbol
, rdesc
);
3734 /* Try to find a destination for indirect edge IE that corresponds to a simple
3735 call or a call of a member function pointer and where the destination is a
3736 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3737 the type of the parameter to which the result of JFUNC is passed. If it can
3738 be determined, return the newly direct edge, otherwise return NULL.
3739 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3742 static struct cgraph_edge
*
3743 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
3744 struct ipa_jump_func
*jfunc
, tree target_type
,
3745 struct cgraph_node
*new_root
,
3746 class ipa_node_params
*new_root_info
)
3748 struct cgraph_edge
*cs
;
3749 tree target
= NULL_TREE
;
3750 bool agg_contents
= ie
->indirect_info
->agg_contents
;
3751 tree scalar
= ipa_value_from_jfunc (new_root_info
, jfunc
, target_type
);
3755 target
= ipa_find_agg_cst_from_init (scalar
, ie
->indirect_info
->offset
,
3756 ie
->indirect_info
->by_ref
);
3757 if (!target
&& ie
->indirect_info
->guaranteed_unmodified
)
3758 target
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3760 ie
->indirect_info
->offset
,
3761 ie
->indirect_info
->by_ref
);
3767 cs
= ipa_make_edge_direct_to_target (ie
, target
);
3769 if (cs
&& !agg_contents
)
3772 gcc_checking_assert (cs
->callee
3774 || jfunc
->type
!= IPA_JF_CONST
3775 || !symtab_node_for_jfunc (jfunc
)
3776 || cs
->callee
== symtab_node_for_jfunc (jfunc
)));
3777 ok
= try_decrement_rdesc_refcount (jfunc
);
3778 gcc_checking_assert (ok
);
3784 /* Return the target to be used in cases of impossible devirtualization. IE
3785 and target (the latter can be NULL) are dumped when dumping is enabled. */
3788 ipa_impossible_devirt_target (struct cgraph_edge
*ie
, tree target
)
3794 "Type inconsistent devirtualization: %s->%s\n",
3795 ie
->caller
->dump_name (),
3796 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
3799 "No devirtualization target in %s\n",
3800 ie
->caller
->dump_name ());
3802 tree new_target
= builtin_decl_unreachable ();
3803 cgraph_node::get_create (new_target
);
3807 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3808 call based on a formal parameter which is described by jump function JFUNC
3809 and if it can be determined, make it direct and return the direct edge.
3810 Otherwise, return NULL. CTX describes the polymorphic context that the
3811 parameter the call is based on brings along with it. NEW_ROOT and
3812 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3815 static struct cgraph_edge
*
3816 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
3817 struct ipa_jump_func
*jfunc
,
3818 class ipa_polymorphic_call_context ctx
,
3819 struct cgraph_node
*new_root
,
3820 class ipa_node_params
*new_root_info
)
3823 bool speculative
= false;
3825 if (!opt_for_fn (ie
->caller
->decl
, flag_devirtualize
))
3828 gcc_assert (!ie
->indirect_info
->by_ref
);
3830 /* Try to do lookup via known virtual table pointer value. */
3831 if (!ie
->indirect_info
->vptr_changed
3832 || opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
))
3835 unsigned HOST_WIDE_INT offset
;
3837 if (jfunc
->type
== IPA_JF_CONST
)
3838 t
= ipa_find_agg_cst_from_init (ipa_get_jf_constant (jfunc
),
3839 ie
->indirect_info
->offset
, true);
3841 t
= ipa_find_agg_cst_from_jfunc_items (&jfunc
->agg
, new_root_info
,
3843 ie
->indirect_info
->offset
, true);
3844 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
3847 t
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
3848 vtable
, offset
, &can_refer
);
3852 || fndecl_built_in_p (t
, BUILT_IN_UNREACHABLE
)
3853 || !possible_polymorphic_call_target_p
3854 (ie
, cgraph_node::get (t
)))
3856 /* Do not speculate builtin_unreachable, it is stupid! */
3857 if (!ie
->indirect_info
->vptr_changed
)
3858 target
= ipa_impossible_devirt_target (ie
, target
);
3865 speculative
= ie
->indirect_info
->vptr_changed
;
3871 ipa_polymorphic_call_context
ie_context (ie
);
3872 vec
<cgraph_node
*>targets
;
3875 ctx
.offset_by (ie
->indirect_info
->offset
);
3876 if (ie
->indirect_info
->vptr_changed
)
3877 ctx
.possible_dynamic_type_change (ie
->in_polymorphic_cdtor
,
3878 ie
->indirect_info
->otr_type
);
3879 ctx
.combine_with (ie_context
, ie
->indirect_info
->otr_type
);
3880 targets
= possible_polymorphic_call_targets
3881 (ie
->indirect_info
->otr_type
,
3882 ie
->indirect_info
->otr_token
,
3884 if (final
&& targets
.length () <= 1)
3886 speculative
= false;
3887 if (targets
.length () == 1)
3888 target
= targets
[0]->decl
;
3890 target
= ipa_impossible_devirt_target (ie
, NULL_TREE
);
3892 else if (!target
&& opt_for_fn (ie
->caller
->decl
, flag_devirtualize_speculatively
)
3893 && !ie
->speculative
&& ie
->maybe_hot_p ())
3896 n
= try_speculative_devirtualization (ie
->indirect_info
->otr_type
,
3897 ie
->indirect_info
->otr_token
,
3898 ie
->indirect_info
->context
);
3908 if (!possible_polymorphic_call_target_p
3909 (ie
, cgraph_node::get_create (target
)))
3913 target
= ipa_impossible_devirt_target (ie
, target
);
3915 return ipa_make_edge_direct_to_target (ie
, target
, speculative
);
3921 /* Update the param called notes associated with NODE when CS is being inlined,
3922 assuming NODE is (potentially indirectly) inlined into CS->callee.
3923 Moreover, if the callee is discovered to be constant, create a new cgraph
3924 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3925 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3928 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
3929 struct cgraph_node
*node
,
3930 vec
<cgraph_edge
*> *new_edges
)
3932 class ipa_edge_args
*top
;
3933 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
3934 struct cgraph_node
*new_root
;
3935 class ipa_node_params
*new_root_info
, *inlined_node_info
;
3938 ipa_check_create_edge_args ();
3939 top
= ipa_edge_args_sum
->get (cs
);
3940 new_root
= cs
->caller
->inlined_to
3941 ? cs
->caller
->inlined_to
: cs
->caller
;
3942 new_root_info
= ipa_node_params_sum
->get (new_root
);
3943 inlined_node_info
= ipa_node_params_sum
->get (cs
->callee
->function_symbol ());
3945 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
3947 class cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
3948 struct ipa_jump_func
*jfunc
;
3951 next_ie
= ie
->next_callee
;
3953 if (ici
->param_index
== -1)
3956 /* We must check range due to calls with variable number of arguments: */
3957 if (!top
|| ici
->param_index
>= ipa_get_cs_argument_count (top
))
3959 ici
->param_index
= -1;
3963 param_index
= ici
->param_index
;
3964 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
3966 auto_vec
<cgraph_node
*, 4> spec_targets
;
3967 if (ie
->speculative
)
3968 for (cgraph_edge
*direct
= ie
->first_speculative_call_target ();
3970 direct
= direct
->next_speculative_call_target ())
3971 spec_targets
.safe_push (direct
->callee
);
3973 if (!opt_for_fn (node
->decl
, flag_indirect_inlining
))
3974 new_direct_edge
= NULL
;
3975 else if (ici
->polymorphic
)
3977 ipa_polymorphic_call_context ctx
;
3978 ctx
= ipa_context_from_jfunc (new_root_info
, cs
, param_index
, jfunc
);
3979 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
, ctx
,
3985 tree target_type
= ipa_get_type (inlined_node_info
, param_index
);
3986 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
3992 /* If speculation was removed, then we need to do nothing. */
3993 if (new_direct_edge
&& new_direct_edge
!= ie
3994 && spec_targets
.contains (new_direct_edge
->callee
))
3996 new_direct_edge
->indirect_inlining_edge
= 1;
3998 if (!new_direct_edge
->speculative
)
4001 else if (new_direct_edge
)
4003 new_direct_edge
->indirect_inlining_edge
= 1;
4006 new_edges
->safe_push (new_direct_edge
);
4009 /* If speculative edge was introduced we still need to update
4010 call info of the indirect edge. */
4011 if (!new_direct_edge
->speculative
)
4014 if (jfunc
->type
== IPA_JF_PASS_THROUGH
4015 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
4017 if (ici
->agg_contents
4018 && !ipa_get_jf_pass_through_agg_preserved (jfunc
)
4019 && !ici
->polymorphic
)
4020 ici
->param_index
= -1;
4023 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
4024 if (ici
->polymorphic
4025 && !ipa_get_jf_pass_through_type_preserved (jfunc
))
4026 ici
->vptr_changed
= true;
4027 ipa_set_param_used_by_indirect_call (new_root_info
,
4028 ici
->param_index
, true);
4029 if (ici
->polymorphic
)
4030 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4031 ici
->param_index
, true);
4034 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
4036 if (ici
->agg_contents
4037 && !ipa_get_jf_ancestor_agg_preserved (jfunc
)
4038 && !ici
->polymorphic
)
4039 ici
->param_index
= -1;
4042 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
4043 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
4044 if (ici
->polymorphic
4045 && !ipa_get_jf_ancestor_type_preserved (jfunc
))
4046 ici
->vptr_changed
= true;
4047 ipa_set_param_used_by_indirect_call (new_root_info
,
4048 ici
->param_index
, true);
4049 if (ici
->polymorphic
)
4050 ipa_set_param_used_by_polymorphic_call (new_root_info
,
4051 ici
->param_index
, true);
4055 /* Either we can find a destination for this edge now or never. */
4056 ici
->param_index
= -1;
4062 /* Recursively traverse subtree of NODE (including node) made of inlined
4063 cgraph_edges when CS has been inlined and invoke
4064 update_indirect_edges_after_inlining on all nodes and
4065 update_jump_functions_after_inlining on all non-inlined edges that lead out
4066 of this subtree. Newly discovered indirect edges will be added to
4067 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
4071 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
4072 struct cgraph_node
*node
,
4073 vec
<cgraph_edge
*> *new_edges
)
4075 struct cgraph_edge
*e
;
4078 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
4080 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4081 if (!e
->inline_failed
)
4082 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
4084 update_jump_functions_after_inlining (cs
, e
);
4085 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4086 update_jump_functions_after_inlining (cs
, e
);
4091 /* Combine two controlled uses counts as done during inlining. */
4094 combine_controlled_uses_counters (int c
, int d
)
4096 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
4097 return IPA_UNDESCRIBED_USE
;
4102 /* Propagate number of controlled users from CS->caleee to the new root of the
4103 tree of inlined nodes. */
4106 propagate_controlled_uses (struct cgraph_edge
*cs
)
4108 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4111 struct cgraph_node
*new_root
= cs
->caller
->inlined_to
4112 ? cs
->caller
->inlined_to
: cs
->caller
;
4113 ipa_node_params
*new_root_info
= ipa_node_params_sum
->get (new_root
);
4114 ipa_node_params
*old_root_info
= ipa_node_params_sum
->get (cs
->callee
);
4120 count
= MIN (ipa_get_cs_argument_count (args
),
4121 ipa_get_param_count (old_root_info
));
4122 for (i
= 0; i
< count
; i
++)
4124 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4125 struct ipa_cst_ref_desc
*rdesc
;
4127 if (jf
->type
== IPA_JF_PASS_THROUGH
)
4130 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
4131 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
4132 d
= ipa_get_controlled_uses (old_root_info
, i
);
4134 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
4135 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
4136 c
= combine_controlled_uses_counters (c
, d
);
4137 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
4139 if (c
!= IPA_UNDESCRIBED_USE
)
4141 lderef
= (ipa_get_param_load_dereferenced (new_root_info
, src_idx
)
4142 || ipa_get_param_load_dereferenced (old_root_info
, i
));
4143 ipa_set_param_load_dereferenced (new_root_info
, src_idx
, lderef
);
4146 if (c
== 0 && !lderef
&& new_root_info
->ipcp_orig_node
)
4148 struct cgraph_node
*n
;
4149 struct ipa_ref
*ref
;
4150 tree t
= new_root_info
->known_csts
[src_idx
];
4152 if (t
&& TREE_CODE (t
) == ADDR_EXPR
4153 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
4154 && (n
= cgraph_node::get (TREE_OPERAND (t
, 0)))
4155 && (ref
= new_root
->find_reference (n
, NULL
, 0)))
4158 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
4159 "reference from %s to %s.\n",
4160 new_root
->dump_name (),
4162 ref
->remove_reference ();
4166 else if (jf
->type
== IPA_JF_CONST
4167 && (rdesc
= jfunc_rdesc_usable (jf
)))
4169 int d
= ipa_get_controlled_uses (old_root_info
, i
);
4170 int c
= rdesc
->refcount
;
4171 tree cst
= ipa_get_jf_constant (jf
);
4172 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
4173 if (rdesc
->refcount
!= IPA_UNDESCRIBED_USE
4174 && ipa_get_param_load_dereferenced (old_root_info
, i
)
4175 && TREE_CODE (cst
) == ADDR_EXPR
4176 && TREE_CODE (TREE_OPERAND (cst
, 0)) == VAR_DECL
)
4178 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4179 new_root
->create_reference (n
, IPA_REF_LOAD
, NULL
);
4181 fprintf (dump_file
, "ipa-prop: Address IPA constant will reach "
4182 "a load so adding LOAD reference from %s to %s.\n",
4183 new_root
->dump_name (), n
->dump_name ());
4185 if (rdesc
->refcount
== 0)
4187 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
4188 && ((TREE_CODE (TREE_OPERAND (cst
, 0))
4190 || (TREE_CODE (TREE_OPERAND (cst
, 0))
4193 symtab_node
*n
= symtab_node::get (TREE_OPERAND (cst
, 0));
4196 remove_described_reference (n
, rdesc
);
4197 cgraph_node
*clone
= cs
->caller
;
4198 while (clone
->inlined_to
4199 && clone
->ipcp_clone
4200 && clone
!= rdesc
->cs
->caller
)
4202 struct ipa_ref
*ref
;
4203 ref
= clone
->find_reference (n
, NULL
, 0);
4207 fprintf (dump_file
, "ipa-prop: Removing "
4208 "cloning-created reference "
4210 clone
->dump_name (),
4212 ref
->remove_reference ();
4214 clone
= clone
->callers
->caller
;
4221 for (i
= ipa_get_param_count (old_root_info
);
4222 i
< ipa_get_cs_argument_count (args
);
4225 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
4227 if (jf
->type
== IPA_JF_CONST
)
4229 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
4231 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
4233 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
4234 ipa_set_controlled_uses (new_root_info
,
4235 jf
->value
.pass_through
.formal_id
,
4236 IPA_UNDESCRIBED_USE
);
4240 /* Update jump functions and call note functions on inlining the call site CS.
4241 CS is expected to lead to a node already cloned by
4242 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4243 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
4247 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
4248 vec
<cgraph_edge
*> *new_edges
)
4251 /* Do nothing if the preparation phase has not been carried out yet
4252 (i.e. during early inlining). */
4253 if (!ipa_node_params_sum
)
4255 gcc_assert (ipa_edge_args_sum
);
4257 propagate_controlled_uses (cs
);
4258 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
4259 ipa_node_params_sum
->remove (cs
->callee
);
4261 ipa_edge_args
*args
= ipa_edge_args_sum
->get (cs
);
4265 if (args
->jump_functions
)
4267 struct ipa_jump_func
*jf
;
4269 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4270 if (jf
->type
== IPA_JF_CONST
4271 && ipa_get_jf_constant_rdesc (jf
))
4278 ipa_edge_args_sum
->remove (cs
);
4280 if (ipcp_transformation_sum
)
4281 ipcp_transformation_sum
->remove (cs
->callee
);
4286 /* Ensure that array of edge arguments infos is big enough to accommodate a
4287 structure for all edges and reallocates it if not. Also, allocate
4288 associated hash tables is they do not already exist. */
4291 ipa_check_create_edge_args (void)
4293 if (!ipa_edge_args_sum
)
4295 = (new (ggc_alloc_no_dtor
<ipa_edge_args_sum_t
> ())
4296 ipa_edge_args_sum_t (symtab
, true));
4297 if (!ipa_bits_hash_table
)
4298 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4299 if (!ipa_vr_hash_table
)
4300 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4303 /* Free all ipa_edge structures. */
4306 ipa_free_all_edge_args (void)
4308 if (!ipa_edge_args_sum
)
4311 ggc_delete (ipa_edge_args_sum
);
4312 ipa_edge_args_sum
= NULL
;
4315 /* Free all ipa_node_params structures. */
4318 ipa_free_all_node_params (void)
4320 if (ipa_node_params_sum
)
4321 ggc_delete (ipa_node_params_sum
);
4322 ipa_node_params_sum
= NULL
;
4325 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4326 tables if they do not already exist. */
4329 ipcp_transformation_initialize (void)
4331 if (!ipa_bits_hash_table
)
4332 ipa_bits_hash_table
= hash_table
<ipa_bit_ggc_hash_traits
>::create_ggc (37);
4333 if (!ipa_vr_hash_table
)
4334 ipa_vr_hash_table
= hash_table
<ipa_vr_ggc_hash_traits
>::create_ggc (37);
4335 if (ipcp_transformation_sum
== NULL
)
4337 ipcp_transformation_sum
= ipcp_transformation_t::create_ggc (symtab
);
4338 ipcp_transformation_sum
->disable_insertion_hook ();
4342 /* Release the IPA CP transformation summary. */
4345 ipcp_free_transformation_sum (void)
4347 if (!ipcp_transformation_sum
)
4350 ipcp_transformation_sum
->~function_summary
<ipcp_transformation
*> ();
4351 ggc_free (ipcp_transformation_sum
);
4352 ipcp_transformation_sum
= NULL
;
4355 /* Set the aggregate replacements of NODE to be AGGVALS. */
4358 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
4359 vec
<ipa_argagg_value
, va_gc
> *aggs
)
4361 ipcp_transformation_initialize ();
4362 ipcp_transformation
*s
= ipcp_transformation_sum
->get_create (node
);
4363 s
->m_agg_values
= aggs
;
4366 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4367 count data structures accordingly. */
4370 ipa_edge_args_sum_t::remove (cgraph_edge
*cs
, ipa_edge_args
*args
)
4372 if (args
->jump_functions
)
4374 struct ipa_jump_func
*jf
;
4376 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
4378 struct ipa_cst_ref_desc
*rdesc
;
4379 try_decrement_rdesc_refcount (jf
);
4380 if (jf
->type
== IPA_JF_CONST
4381 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
4388 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4389 reference count data strucutres accordingly. */
4392 ipa_edge_args_sum_t::duplicate (cgraph_edge
*src
, cgraph_edge
*dst
,
4393 ipa_edge_args
*old_args
, ipa_edge_args
*new_args
)
4397 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
4398 if (old_args
->polymorphic_call_contexts
)
4399 new_args
->polymorphic_call_contexts
4400 = vec_safe_copy (old_args
->polymorphic_call_contexts
);
4402 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
4404 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
4405 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
4407 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
4409 if (src_jf
->type
== IPA_JF_CONST
)
4411 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
4414 dst_jf
->value
.constant
.rdesc
= NULL
;
4415 else if (src
->caller
== dst
->caller
)
4417 /* Creation of a speculative edge. If the source edge is the one
4418 grabbing a reference, we must create a new (duplicate)
4419 reference description. Otherwise they refer to the same
4420 description corresponding to a reference taken in a function
4421 src->caller is inlined to. In that case we just must
4422 increment the refcount. */
4423 if (src_rdesc
->cs
== src
)
4425 symtab_node
*n
= symtab_node_for_jfunc (src_jf
);
4426 gcc_checking_assert (n
);
4428 = src
->caller
->find_reference (n
, src
->call_stmt
,
4430 gcc_checking_assert (ref
);
4431 dst
->caller
->clone_reference (ref
, ref
->stmt
);
4433 ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4434 dst_rdesc
->cs
= dst
;
4435 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4436 dst_rdesc
->next_duplicate
= NULL
;
4437 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4441 src_rdesc
->refcount
++;
4442 dst_jf
->value
.constant
.rdesc
= src_rdesc
;
4445 else if (src_rdesc
->cs
== src
)
4447 struct ipa_cst_ref_desc
*dst_rdesc
= ipa_refdesc_pool
.allocate ();
4448 dst_rdesc
->cs
= dst
;
4449 dst_rdesc
->refcount
= src_rdesc
->refcount
;
4450 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
4451 src_rdesc
->next_duplicate
= dst_rdesc
;
4452 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4456 struct ipa_cst_ref_desc
*dst_rdesc
;
4457 /* This can happen during inlining, when a JFUNC can refer to a
4458 reference taken in a function up in the tree of inline clones.
4459 We need to find the duplicate that refers to our tree of
4462 gcc_assert (dst
->caller
->inlined_to
);
4463 for (dst_rdesc
= src_rdesc
->next_duplicate
;
4465 dst_rdesc
= dst_rdesc
->next_duplicate
)
4467 struct cgraph_node
*top
;
4468 top
= dst_rdesc
->cs
->caller
->inlined_to
4469 ? dst_rdesc
->cs
->caller
->inlined_to
4470 : dst_rdesc
->cs
->caller
;
4471 if (dst
->caller
->inlined_to
== top
)
4474 gcc_assert (dst_rdesc
);
4475 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
4478 else if (dst_jf
->type
== IPA_JF_PASS_THROUGH
4479 && src
->caller
== dst
->caller
)
4481 struct cgraph_node
*inline_root
= dst
->caller
->inlined_to
4482 ? dst
->caller
->inlined_to
: dst
->caller
;
4483 ipa_node_params
*root_info
= ipa_node_params_sum
->get (inline_root
);
4484 int idx
= ipa_get_jf_pass_through_formal_id (dst_jf
);
4486 int c
= ipa_get_controlled_uses (root_info
, idx
);
4487 if (c
!= IPA_UNDESCRIBED_USE
)
4490 ipa_set_controlled_uses (root_info
, idx
, c
);
4496 /* Analyze newly added function into callgraph. */
4499 ipa_add_new_function (cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
4501 if (node
->has_gimple_body_p ())
4502 ipa_analyze_node (node
);
4505 /* Hook that is called by summary when a node is duplicated. */
4508 ipa_node_params_t::duplicate(cgraph_node
*, cgraph_node
*,
4509 ipa_node_params
*old_info
,
4510 ipa_node_params
*new_info
)
4512 new_info
->descriptors
= vec_safe_copy (old_info
->descriptors
);
4513 new_info
->lattices
= NULL
;
4514 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
4515 new_info
->known_csts
= old_info
->known_csts
.copy ();
4516 new_info
->known_contexts
= old_info
->known_contexts
.copy ();
4518 new_info
->analysis_done
= old_info
->analysis_done
;
4519 new_info
->node_enqueued
= old_info
->node_enqueued
;
4520 new_info
->versionable
= old_info
->versionable
;
4523 /* Duplication of ipcp transformation summaries. */
4526 ipcp_transformation_t::duplicate(cgraph_node
*, cgraph_node
*dst
,
4527 ipcp_transformation
*src_trans
,
4528 ipcp_transformation
*dst_trans
)
4530 /* Avoid redundant work of duplicating vectors we will never use. */
4531 if (dst
->inlined_to
)
4533 dst_trans
->m_agg_values
= vec_safe_copy (src_trans
->m_agg_values
);
4534 dst_trans
->bits
= vec_safe_copy (src_trans
->bits
);
4535 dst_trans
->m_vr
= vec_safe_copy (src_trans
->m_vr
);
4538 /* Register our cgraph hooks if they are not already there. */
4541 ipa_register_cgraph_hooks (void)
4543 ipa_check_create_node_params ();
4544 ipa_check_create_edge_args ();
4546 function_insertion_hook_holder
=
4547 symtab
->add_cgraph_insertion_hook (&ipa_add_new_function
, NULL
);
4550 /* Unregister our cgraph hooks if they are not already there. */
4553 ipa_unregister_cgraph_hooks (void)
4555 if (function_insertion_hook_holder
)
4556 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
4557 function_insertion_hook_holder
= NULL
;
4560 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4561 longer needed after ipa-cp. */
4564 ipa_free_all_structures_after_ipa_cp (void)
4566 if (!optimize
&& !in_lto_p
)
4568 ipa_free_all_edge_args ();
4569 ipa_free_all_node_params ();
4570 ipcp_sources_pool
.release ();
4571 ipcp_cst_values_pool
.release ();
4572 ipcp_poly_ctx_values_pool
.release ();
4573 ipcp_agg_lattice_pool
.release ();
4574 ipa_unregister_cgraph_hooks ();
4575 ipa_refdesc_pool
.release ();
4579 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4580 longer needed after indirect inlining. */
4583 ipa_free_all_structures_after_iinln (void)
4585 ipa_free_all_edge_args ();
4586 ipa_free_all_node_params ();
4587 ipa_unregister_cgraph_hooks ();
4588 ipcp_sources_pool
.release ();
4589 ipcp_cst_values_pool
.release ();
4590 ipcp_poly_ctx_values_pool
.release ();
4591 ipcp_agg_lattice_pool
.release ();
4592 ipa_refdesc_pool
.release ();
4595 /* Print ipa_tree_map data structures of all functions in the
4599 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
4602 class ipa_node_params
*info
;
4604 if (!node
->definition
)
4606 info
= ipa_node_params_sum
->get (node
);
4607 fprintf (f
, " function %s parameter descriptors:\n", node
->dump_name ());
4610 fprintf (f
, " no params return\n");
4613 count
= ipa_get_param_count (info
);
4614 for (i
= 0; i
< count
; i
++)
4619 ipa_dump_param (f
, info
, i
);
4620 if (ipa_is_param_used (info
, i
))
4621 fprintf (f
, " used");
4622 if (ipa_is_param_used_by_ipa_predicates (info
, i
))
4623 fprintf (f
, " used_by_ipa_predicates");
4624 if (ipa_is_param_used_by_indirect_call (info
, i
))
4625 fprintf (f
, " used_by_indirect_call");
4626 if (ipa_is_param_used_by_polymorphic_call (info
, i
))
4627 fprintf (f
, " used_by_polymorphic_call");
4628 c
= ipa_get_controlled_uses (info
, i
);
4629 if (c
== IPA_UNDESCRIBED_USE
)
4630 fprintf (f
, " undescribed_use");
4632 fprintf (f
, " controlled_uses=%i %s", c
,
4633 ipa_get_param_load_dereferenced (info
, i
)
4634 ? "(load_dereferenced)" : "");
4639 /* Print ipa_tree_map data structures of all functions in the
4643 ipa_print_all_params (FILE * f
)
4645 struct cgraph_node
*node
;
4647 fprintf (f
, "\nFunction parameters:\n");
4648 FOR_EACH_FUNCTION (node
)
4649 ipa_print_node_params (f
, node
);
4652 /* Stream out jump function JUMP_FUNC to OB. */
4655 ipa_write_jump_function (struct output_block
*ob
,
4656 struct ipa_jump_func
*jump_func
)
4658 struct ipa_agg_jf_item
*item
;
4659 struct bitpack_d bp
;
4663 /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4664 as well as WPA memory by handling them specially. */
4665 if (jump_func
->type
== IPA_JF_CONST
4666 && TREE_CODE (jump_func
->value
.constant
.value
) == ADDR_EXPR
)
4669 streamer_write_uhwi (ob
, jump_func
->type
* 2 + flag
);
4670 switch (jump_func
->type
)
4672 case IPA_JF_UNKNOWN
:
4676 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4677 stream_write_tree (ob
,
4679 ? TREE_OPERAND (jump_func
->value
.constant
.value
, 0)
4680 : jump_func
->value
.constant
.value
, true);
4682 case IPA_JF_PASS_THROUGH
:
4683 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4684 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4686 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4687 bp
= bitpack_create (ob
->main_stream
);
4688 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4689 streamer_write_bitpack (&bp
);
4691 else if (TREE_CODE_CLASS (jump_func
->value
.pass_through
.operation
)
4693 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4696 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4697 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4700 case IPA_JF_ANCESTOR
:
4701 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4702 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4703 bp
= bitpack_create (ob
->main_stream
);
4704 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4705 bp_pack_value (&bp
, jump_func
->value
.ancestor
.keep_null
, 1);
4706 streamer_write_bitpack (&bp
);
4709 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4712 count
= vec_safe_length (jump_func
->agg
.items
);
4713 streamer_write_uhwi (ob
, count
);
4716 bp
= bitpack_create (ob
->main_stream
);
4717 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4718 streamer_write_bitpack (&bp
);
4721 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4723 stream_write_tree (ob
, item
->type
, true);
4724 streamer_write_uhwi (ob
, item
->offset
);
4725 streamer_write_uhwi (ob
, item
->jftype
);
4726 switch (item
->jftype
)
4728 case IPA_JF_UNKNOWN
:
4731 stream_write_tree (ob
, item
->value
.constant
, true);
4733 case IPA_JF_PASS_THROUGH
:
4734 case IPA_JF_LOAD_AGG
:
4735 streamer_write_uhwi (ob
, item
->value
.pass_through
.operation
);
4736 streamer_write_uhwi (ob
, item
->value
.pass_through
.formal_id
);
4737 if (TREE_CODE_CLASS (item
->value
.pass_through
.operation
)
4739 stream_write_tree (ob
, item
->value
.pass_through
.operand
, true);
4740 if (item
->jftype
== IPA_JF_LOAD_AGG
)
4742 stream_write_tree (ob
, item
->value
.load_agg
.type
, true);
4743 streamer_write_uhwi (ob
, item
->value
.load_agg
.offset
);
4744 bp
= bitpack_create (ob
->main_stream
);
4745 bp_pack_value (&bp
, item
->value
.load_agg
.by_ref
, 1);
4746 streamer_write_bitpack (&bp
);
4750 fatal_error (UNKNOWN_LOCATION
,
4751 "invalid jump function in LTO stream");
4755 bp
= bitpack_create (ob
->main_stream
);
4756 bp_pack_value (&bp
, !!jump_func
->bits
, 1);
4757 streamer_write_bitpack (&bp
);
4758 if (jump_func
->bits
)
4760 streamer_write_widest_int (ob
, jump_func
->bits
->value
);
4761 streamer_write_widest_int (ob
, jump_func
->bits
->mask
);
4763 bp_pack_value (&bp
, !!jump_func
->m_vr
, 1);
4764 streamer_write_bitpack (&bp
);
4765 if (jump_func
->m_vr
)
4767 streamer_write_enum (ob
->main_stream
, value_rang_type
,
4768 VR_LAST
, jump_func
->m_vr
->kind ());
4769 stream_write_tree (ob
, jump_func
->m_vr
->min (), true);
4770 stream_write_tree (ob
, jump_func
->m_vr
->max (), true);
4774 /* Read in jump function JUMP_FUNC from IB. */
4777 ipa_read_jump_function (class lto_input_block
*ib
,
4778 struct ipa_jump_func
*jump_func
,
4779 struct cgraph_edge
*cs
,
4780 class data_in
*data_in
,
4783 enum jump_func_type jftype
;
4784 enum tree_code operation
;
4786 int val
= streamer_read_uhwi (ib
);
4787 bool flag
= val
& 1;
4789 jftype
= (enum jump_func_type
) (val
/ 2);
4792 case IPA_JF_UNKNOWN
:
4793 ipa_set_jf_unknown (jump_func
);
4797 tree t
= stream_read_tree (ib
, data_in
);
4798 if (flag
&& prevails
)
4799 t
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (t
)), t
);
4800 ipa_set_jf_constant (jump_func
, t
, cs
);
4803 case IPA_JF_PASS_THROUGH
:
4804 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4805 if (operation
== NOP_EXPR
)
4807 int formal_id
= streamer_read_uhwi (ib
);
4808 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4809 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4810 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
);
4812 else if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4814 int formal_id
= streamer_read_uhwi (ib
);
4815 ipa_set_jf_unary_pass_through (jump_func
, formal_id
, operation
);
4819 tree operand
= stream_read_tree (ib
, data_in
);
4820 int formal_id
= streamer_read_uhwi (ib
);
4821 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4825 case IPA_JF_ANCESTOR
:
4827 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4828 int formal_id
= streamer_read_uhwi (ib
);
4829 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4830 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4831 bool keep_null
= bp_unpack_value (&bp
, 1);
4832 ipa_set_ancestor_jf (jump_func
, offset
, formal_id
, agg_preserved
,
4837 fatal_error (UNKNOWN_LOCATION
, "invalid jump function in LTO stream");
4840 count
= streamer_read_uhwi (ib
);
4843 jump_func
->agg
.items
= NULL
;
4844 vec_safe_reserve (jump_func
->agg
.items
, count
, true);
4848 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4849 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4851 for (i
= 0; i
< count
; i
++)
4853 struct ipa_agg_jf_item item
;
4854 item
.type
= stream_read_tree (ib
, data_in
);
4855 item
.offset
= streamer_read_uhwi (ib
);
4856 item
.jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4858 switch (item
.jftype
)
4860 case IPA_JF_UNKNOWN
:
4863 item
.value
.constant
= stream_read_tree (ib
, data_in
);
4865 case IPA_JF_PASS_THROUGH
:
4866 case IPA_JF_LOAD_AGG
:
4867 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4868 item
.value
.pass_through
.operation
= operation
;
4869 item
.value
.pass_through
.formal_id
= streamer_read_uhwi (ib
);
4870 if (TREE_CODE_CLASS (operation
) == tcc_unary
)
4871 item
.value
.pass_through
.operand
= NULL_TREE
;
4873 item
.value
.pass_through
.operand
= stream_read_tree (ib
, data_in
);
4874 if (item
.jftype
== IPA_JF_LOAD_AGG
)
4876 struct bitpack_d bp
;
4877 item
.value
.load_agg
.type
= stream_read_tree (ib
, data_in
);
4878 item
.value
.load_agg
.offset
= streamer_read_uhwi (ib
);
4879 bp
= streamer_read_bitpack (ib
);
4880 item
.value
.load_agg
.by_ref
= bp_unpack_value (&bp
, 1);
4884 fatal_error (UNKNOWN_LOCATION
,
4885 "invalid jump function in LTO stream");
4888 jump_func
->agg
.items
->quick_push (item
);
4891 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4892 bool bits_known
= bp_unpack_value (&bp
, 1);
4895 widest_int value
= streamer_read_widest_int (ib
);
4896 widest_int mask
= streamer_read_widest_int (ib
);
4898 ipa_set_jfunc_bits (jump_func
, value
, mask
);
4901 jump_func
->bits
= NULL
;
4903 struct bitpack_d vr_bp
= streamer_read_bitpack (ib
);
4904 bool vr_known
= bp_unpack_value (&vr_bp
, 1);
4907 enum value_range_kind type
= streamer_read_enum (ib
, value_range_kind
,
4909 tree min
= stream_read_tree (ib
, data_in
);
4910 tree max
= stream_read_tree (ib
, data_in
);
4912 ipa_set_jfunc_vr (jump_func
, type
, min
, max
);
4915 jump_func
->m_vr
= NULL
;
4918 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4919 relevant to indirect inlining to OB. */
4922 ipa_write_indirect_edge_info (struct output_block
*ob
,
4923 struct cgraph_edge
*cs
)
4925 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4926 struct bitpack_d bp
;
4928 streamer_write_hwi (ob
, ii
->param_index
);
4929 bp
= bitpack_create (ob
->main_stream
);
4930 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4931 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4932 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4933 bp_pack_value (&bp
, ii
->by_ref
, 1);
4934 bp_pack_value (&bp
, ii
->guaranteed_unmodified
, 1);
4935 bp_pack_value (&bp
, ii
->vptr_changed
, 1);
4936 streamer_write_bitpack (&bp
);
4937 if (ii
->agg_contents
|| ii
->polymorphic
)
4938 streamer_write_hwi (ob
, ii
->offset
);
4940 gcc_assert (ii
->offset
== 0);
4942 if (ii
->polymorphic
)
4944 streamer_write_hwi (ob
, ii
->otr_token
);
4945 stream_write_tree (ob
, ii
->otr_type
, true);
4946 ii
->context
.stream_out (ob
);
4950 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4951 relevant to indirect inlining from IB. */
4954 ipa_read_indirect_edge_info (class lto_input_block
*ib
,
4955 class data_in
*data_in
,
4956 struct cgraph_edge
*cs
,
4957 class ipa_node_params
*info
)
4959 class cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4960 struct bitpack_d bp
;
4962 ii
->param_index
= (int) streamer_read_hwi (ib
);
4963 bp
= streamer_read_bitpack (ib
);
4964 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4965 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4966 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4967 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4968 ii
->guaranteed_unmodified
= bp_unpack_value (&bp
, 1);
4969 ii
->vptr_changed
= bp_unpack_value (&bp
, 1);
4970 if (ii
->agg_contents
|| ii
->polymorphic
)
4971 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4974 if (ii
->polymorphic
)
4976 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4977 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4978 ii
->context
.stream_in (ib
, data_in
);
4980 if (info
&& ii
->param_index
>= 0)
4982 if (ii
->polymorphic
)
4983 ipa_set_param_used_by_polymorphic_call (info
,
4984 ii
->param_index
, true);
4985 ipa_set_param_used_by_indirect_call (info
,
4986 ii
->param_index
, true);
4990 /* Stream out NODE info to OB. */
4993 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4996 lto_symtab_encoder_t encoder
;
4997 ipa_node_params
*info
= ipa_node_params_sum
->get (node
);
4999 struct cgraph_edge
*e
;
5000 struct bitpack_d bp
;
5002 encoder
= ob
->decl_state
->symtab_node_encoder
;
5003 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
5004 streamer_write_uhwi (ob
, node_ref
);
5006 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
5007 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5008 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
5009 bp
= bitpack_create (ob
->main_stream
);
5010 gcc_assert (info
->analysis_done
5011 || ipa_get_param_count (info
) == 0);
5012 gcc_assert (!info
->node_enqueued
);
5013 gcc_assert (!info
->ipcp_orig_node
);
5014 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5016 /* TODO: We could just not stream the bit in the undescribed case. */
5017 bool d
= (ipa_get_controlled_uses (info
, j
) != IPA_UNDESCRIBED_USE
)
5018 ? ipa_get_param_load_dereferenced (info
, j
) : true;
5019 bp_pack_value (&bp
, d
, 1);
5020 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
5022 streamer_write_bitpack (&bp
);
5023 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
5025 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
5026 stream_write_tree (ob
, ipa_get_type (info
, j
), true);
5028 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5030 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5034 streamer_write_uhwi (ob
, 0);
5038 streamer_write_uhwi (ob
,
5039 ipa_get_cs_argument_count (args
) * 2
5040 + (args
->polymorphic_call_contexts
!= NULL
));
5041 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5043 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5044 if (args
->polymorphic_call_contexts
!= NULL
)
5045 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5048 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5050 ipa_edge_args
*args
= ipa_edge_args_sum
->get (e
);
5052 streamer_write_uhwi (ob
, 0);
5055 streamer_write_uhwi (ob
,
5056 ipa_get_cs_argument_count (args
) * 2
5057 + (args
->polymorphic_call_contexts
!= NULL
));
5058 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
5060 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
5061 if (args
->polymorphic_call_contexts
!= NULL
)
5062 ipa_get_ith_polymorhic_call_context (args
, j
)->stream_out (ob
);
5065 ipa_write_indirect_edge_info (ob
, e
);
5069 /* Stream in edge E from IB. */
5072 ipa_read_edge_info (class lto_input_block
*ib
,
5073 class data_in
*data_in
,
5074 struct cgraph_edge
*e
, bool prevails
)
5076 int count
= streamer_read_uhwi (ib
);
5077 bool contexts_computed
= count
& 1;
5083 && (e
->possibly_call_in_translation_unit_p ()
5084 /* Also stream in jump functions to builtins in hope that they
5085 will get fnspecs. */
5086 || fndecl_built_in_p (e
->callee
->decl
, BUILT_IN_NORMAL
)))
5088 ipa_edge_args
*args
= ipa_edge_args_sum
->get_create (e
);
5089 vec_safe_grow_cleared (args
->jump_functions
, count
, true);
5090 if (contexts_computed
)
5091 vec_safe_grow_cleared (args
->polymorphic_call_contexts
, count
, true);
5092 for (int k
= 0; k
< count
; k
++)
5094 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
5096 if (contexts_computed
)
5097 ipa_get_ith_polymorhic_call_context (args
, k
)->stream_in
5103 for (int k
= 0; k
< count
; k
++)
5105 struct ipa_jump_func dummy
;
5106 ipa_read_jump_function (ib
, &dummy
, e
,
5108 if (contexts_computed
)
5110 class ipa_polymorphic_call_context ctx
;
5111 ctx
.stream_in (ib
, data_in
);
5117 /* Stream in NODE info from IB. */
5120 ipa_read_node_info (class lto_input_block
*ib
, struct cgraph_node
*node
,
5121 class data_in
*data_in
)
5124 struct cgraph_edge
*e
;
5125 struct bitpack_d bp
;
5126 bool prevails
= node
->prevailing_p ();
5127 ipa_node_params
*info
5128 = prevails
? ipa_node_params_sum
->get_create (node
) : NULL
;
5130 int param_count
= streamer_read_uhwi (ib
);
5133 ipa_alloc_node_params (node
, param_count
);
5134 for (k
= 0; k
< param_count
; k
++)
5135 (*info
->descriptors
)[k
].move_cost
= streamer_read_uhwi (ib
);
5136 if (ipa_get_param_count (info
) != 0)
5137 info
->analysis_done
= true;
5138 info
->node_enqueued
= false;
5141 for (k
= 0; k
< param_count
; k
++)
5142 streamer_read_uhwi (ib
);
5144 bp
= streamer_read_bitpack (ib
);
5145 for (k
= 0; k
< param_count
; k
++)
5147 bool load_dereferenced
= bp_unpack_value (&bp
, 1);
5148 bool used
= bp_unpack_value (&bp
, 1);
5152 ipa_set_param_load_dereferenced (info
, k
, load_dereferenced
);
5153 ipa_set_param_used (info
, k
, used
);
5156 for (k
= 0; k
< param_count
; k
++)
5158 int nuses
= streamer_read_hwi (ib
);
5159 tree type
= stream_read_tree (ib
, data_in
);
5163 ipa_set_controlled_uses (info
, k
, nuses
);
5164 (*info
->descriptors
)[k
].decl_or_type
= type
;
5167 for (e
= node
->callees
; e
; e
= e
->next_callee
)
5168 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5169 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
5171 ipa_read_edge_info (ib
, data_in
, e
, prevails
);
5172 ipa_read_indirect_edge_info (ib
, data_in
, e
, info
);
5176 /* Write jump functions for nodes in SET. */
5179 ipa_prop_write_jump_functions (void)
5181 struct output_block
*ob
;
5182 unsigned int count
= 0;
5183 lto_symtab_encoder_iterator lsei
;
5184 lto_symtab_encoder_t encoder
;
5186 if (!ipa_node_params_sum
|| !ipa_edge_args_sum
)
5189 ob
= create_output_block (LTO_section_jump_functions
);
5190 encoder
= ob
->decl_state
->symtab_node_encoder
;
5192 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5193 lsei_next_function_in_partition (&lsei
))
5195 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5196 if (node
->has_gimple_body_p ()
5197 && ipa_node_params_sum
->get (node
) != NULL
)
5201 streamer_write_uhwi (ob
, count
);
5203 /* Process all of the functions. */
5204 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5205 lsei_next_function_in_partition (&lsei
))
5207 cgraph_node
*node
= lsei_cgraph_node (lsei
);
5208 if (node
->has_gimple_body_p ()
5209 && ipa_node_params_sum
->get (node
) != NULL
)
5210 ipa_write_node_info (ob
, node
);
5212 streamer_write_char_stream (ob
->main_stream
, 0);
5213 produce_asm (ob
, NULL
);
5214 destroy_output_block (ob
);
5217 /* Read section in file FILE_DATA of length LEN with data DATA. */
5220 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
5223 const struct lto_function_header
*header
=
5224 (const struct lto_function_header
*) data
;
5225 const int cfg_offset
= sizeof (struct lto_function_header
);
5226 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5227 const int string_offset
= main_offset
+ header
->main_size
;
5228 class data_in
*data_in
;
5232 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5233 header
->main_size
, file_data
->mode_table
);
5236 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5237 header
->string_size
, vNULL
);
5238 count
= streamer_read_uhwi (&ib_main
);
5240 for (i
= 0; i
< count
; i
++)
5243 struct cgraph_node
*node
;
5244 lto_symtab_encoder_t encoder
;
5246 index
= streamer_read_uhwi (&ib_main
);
5247 encoder
= file_data
->symtab_node_encoder
;
5248 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5250 gcc_assert (node
->definition
);
5251 ipa_read_node_info (&ib_main
, node
, data_in
);
5253 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5255 lto_data_in_delete (data_in
);
5258 /* Read ipcp jump functions. */
5261 ipa_prop_read_jump_functions (void)
5263 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5264 struct lto_file_decl_data
*file_data
;
5267 ipa_check_create_node_params ();
5268 ipa_check_create_edge_args ();
5269 ipa_register_cgraph_hooks ();
5271 while ((file_data
= file_data_vec
[j
++]))
5275 = lto_get_summary_section_data (file_data
, LTO_section_jump_functions
,
5278 ipa_prop_read_section (file_data
, data
, len
);
5283 write_ipcp_transformation_info (output_block
*ob
, cgraph_node
*node
)
5286 unsigned int count
= 0;
5287 lto_symtab_encoder_t encoder
;
5289 encoder
= ob
->decl_state
->symtab_node_encoder
;
5290 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
5291 streamer_write_uhwi (ob
, node_ref
);
5293 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5294 if (ts
&& !vec_safe_is_empty (ts
->m_agg_values
))
5296 streamer_write_uhwi (ob
, ts
->m_agg_values
->length ());
5297 for (const ipa_argagg_value
&av
: ts
->m_agg_values
)
5299 struct bitpack_d bp
;
5301 stream_write_tree (ob
, av
.value
, true);
5302 streamer_write_uhwi (ob
, av
.unit_offset
);
5303 streamer_write_uhwi (ob
, av
.index
);
5305 bp
= bitpack_create (ob
->main_stream
);
5306 bp_pack_value (&bp
, av
.by_ref
, 1);
5307 streamer_write_bitpack (&bp
);
5311 streamer_write_uhwi (ob
, 0);
5313 if (ts
&& vec_safe_length (ts
->m_vr
) > 0)
5315 count
= ts
->m_vr
->length ();
5316 streamer_write_uhwi (ob
, count
);
5317 for (unsigned i
= 0; i
< count
; ++i
)
5319 struct bitpack_d bp
;
5320 ipa_vr
*parm_vr
= &(*ts
->m_vr
)[i
];
5321 bp
= bitpack_create (ob
->main_stream
);
5322 bp_pack_value (&bp
, parm_vr
->known
, 1);
5323 streamer_write_bitpack (&bp
);
5326 streamer_write_enum (ob
->main_stream
, value_rang_type
,
5327 VR_LAST
, parm_vr
->type
);
5328 streamer_write_wide_int (ob
, parm_vr
->min
);
5329 streamer_write_wide_int (ob
, parm_vr
->max
);
5334 streamer_write_uhwi (ob
, 0);
5336 if (ts
&& vec_safe_length (ts
->bits
) > 0)
5338 count
= ts
->bits
->length ();
5339 streamer_write_uhwi (ob
, count
);
5341 for (unsigned i
= 0; i
< count
; ++i
)
5343 const ipa_bits
*bits_jfunc
= (*ts
->bits
)[i
];
5344 struct bitpack_d bp
= bitpack_create (ob
->main_stream
);
5345 bp_pack_value (&bp
, !!bits_jfunc
, 1);
5346 streamer_write_bitpack (&bp
);
5349 streamer_write_widest_int (ob
, bits_jfunc
->value
);
5350 streamer_write_widest_int (ob
, bits_jfunc
->mask
);
5355 streamer_write_uhwi (ob
, 0);
5358 /* Stream in the aggregate value replacement chain for NODE from IB. */
5361 read_ipcp_transformation_info (lto_input_block
*ib
, cgraph_node
*node
,
5364 unsigned int count
, i
;
5366 count
= streamer_read_uhwi (ib
);
5369 ipcp_transformation_initialize ();
5370 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
5371 vec_safe_grow_cleared (ts
->m_agg_values
, count
, true);
5372 for (i
= 0; i
<count
; i
++)
5374 ipa_argagg_value
*av
= &(*ts
->m_agg_values
)[i
];;
5376 av
->value
= stream_read_tree (ib
, data_in
);
5377 av
->unit_offset
= streamer_read_uhwi (ib
);
5378 av
->index
= streamer_read_uhwi (ib
);
5380 bitpack_d bp
= streamer_read_bitpack (ib
);
5381 av
->by_ref
= bp_unpack_value (&bp
, 1);
5385 count
= streamer_read_uhwi (ib
);
5388 ipcp_transformation_initialize ();
5389 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
5390 vec_safe_grow_cleared (ts
->m_vr
, count
, true);
5391 for (i
= 0; i
< count
; i
++)
5394 parm_vr
= &(*ts
->m_vr
)[i
];
5395 struct bitpack_d bp
;
5396 bp
= streamer_read_bitpack (ib
);
5397 parm_vr
->known
= bp_unpack_value (&bp
, 1);
5400 parm_vr
->type
= streamer_read_enum (ib
, value_range_kind
,
5402 parm_vr
->min
= streamer_read_wide_int (ib
);
5403 parm_vr
->max
= streamer_read_wide_int (ib
);
5407 count
= streamer_read_uhwi (ib
);
5410 ipcp_transformation_initialize ();
5411 ipcp_transformation
*ts
= ipcp_transformation_sum
->get_create (node
);
5412 vec_safe_grow_cleared (ts
->bits
, count
, true);
5414 for (i
= 0; i
< count
; i
++)
5416 struct bitpack_d bp
= streamer_read_bitpack (ib
);
5417 bool known
= bp_unpack_value (&bp
, 1);
5420 const widest_int value
= streamer_read_widest_int (ib
);
5421 const widest_int mask
= streamer_read_widest_int (ib
);
5423 = ipa_get_ipa_bits_for_value (value
, mask
);
5424 (*ts
->bits
)[i
] = bits
;
5430 /* Write all aggregate replacement for nodes in set. */
5433 ipcp_write_transformation_summaries (void)
5435 struct cgraph_node
*node
;
5436 struct output_block
*ob
;
5437 unsigned int count
= 0;
5438 lto_symtab_encoder_iterator lsei
;
5439 lto_symtab_encoder_t encoder
;
5441 ob
= create_output_block (LTO_section_ipcp_transform
);
5442 encoder
= ob
->decl_state
->symtab_node_encoder
;
5444 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5445 lsei_next_function_in_partition (&lsei
))
5447 node
= lsei_cgraph_node (lsei
);
5448 if (node
->has_gimple_body_p ())
5452 streamer_write_uhwi (ob
, count
);
5454 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
5455 lsei_next_function_in_partition (&lsei
))
5457 node
= lsei_cgraph_node (lsei
);
5458 if (node
->has_gimple_body_p ())
5459 write_ipcp_transformation_info (ob
, node
);
5461 streamer_write_char_stream (ob
->main_stream
, 0);
5462 produce_asm (ob
, NULL
);
5463 destroy_output_block (ob
);
5466 /* Read replacements section in file FILE_DATA of length LEN with data
5470 read_replacements_section (struct lto_file_decl_data
*file_data
,
5474 const struct lto_function_header
*header
=
5475 (const struct lto_function_header
*) data
;
5476 const int cfg_offset
= sizeof (struct lto_function_header
);
5477 const int main_offset
= cfg_offset
+ header
->cfg_size
;
5478 const int string_offset
= main_offset
+ header
->main_size
;
5479 class data_in
*data_in
;
5483 lto_input_block
ib_main ((const char *) data
+ main_offset
,
5484 header
->main_size
, file_data
->mode_table
);
5486 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
5487 header
->string_size
, vNULL
);
5488 count
= streamer_read_uhwi (&ib_main
);
5490 for (i
= 0; i
< count
; i
++)
5493 struct cgraph_node
*node
;
5494 lto_symtab_encoder_t encoder
;
5496 index
= streamer_read_uhwi (&ib_main
);
5497 encoder
= file_data
->symtab_node_encoder
;
5498 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
5500 gcc_assert (node
->definition
);
5501 read_ipcp_transformation_info (&ib_main
, node
, data_in
);
5503 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
5505 lto_data_in_delete (data_in
);
5508 /* Read IPA-CP aggregate replacements. */
5511 ipcp_read_transformation_summaries (void)
5513 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
5514 struct lto_file_decl_data
*file_data
;
5517 while ((file_data
= file_data_vec
[j
++]))
5521 = lto_get_summary_section_data (file_data
, LTO_section_ipcp_transform
,
5524 read_replacements_section (file_data
, data
, len
);
5528 /* Adjust the aggregate replacements in TS to reflect parameters skipped in
5529 NODE but also if any parameter was IPA-SRAed into a scalar go ahead with
5530 substitution of the default_definitions of that new param with the
5531 appropriate constant.
5533 If after adjustments there are no aggregate replacements left, the
5534 m_agg_values will be set to NULL. In other cases, it may be shrunk.
5536 Return true if any values were already substituted for scalarized parameters
5537 and update_cfg shuld be run after replace_uses_by. */
5540 adjust_agg_replacement_values (cgraph_node
*node
,
5541 ipcp_transformation
*ts
,
5542 const vec
<ipa_param_descriptor
, va_gc
>
5545 clone_info
*cinfo
= clone_info::get (node
);
5546 if (!cinfo
|| !cinfo
->param_adjustments
)
5549 bool removed_item
= false;
5550 bool done_replacement
= false;
5551 unsigned dst_index
= 0;
5552 unsigned count
= ts
->m_agg_values
->length ();
5553 for (unsigned i
= 0; i
< count
; i
++)
5555 ipa_argagg_value
*v
= &(*ts
->m_agg_values
)[i
];
5556 gcc_checking_assert (v
->index
>= 0);
5558 tree cst_type
= TREE_TYPE (v
->value
);
5561 = cinfo
->param_adjustments
->get_updated_index_or_split (v
->index
,
5569 (*ts
->m_agg_values
)[dst_index
] = *v
;
5574 removed_item
= true;
5577 tree parm
= ipa_get_param (descriptors
, split_idx
);
5578 tree ddef
= ssa_default_def (cfun
, parm
);
5581 replace_uses_by (ddef
, v
->value
);
5582 done_replacement
= true;
5590 ggc_free (ts
->m_agg_values
);
5591 ts
->m_agg_values
= NULL
;
5593 else if (removed_item
)
5594 ts
->m_agg_values
->truncate (dst_index
);
5596 return done_replacement
;
5599 /* Dominator walker driving the ipcp modification phase. */
5601 class ipcp_modif_dom_walker
: public dom_walker
5604 ipcp_modif_dom_walker (struct ipa_func_body_info
*fbi
,
5605 vec
<ipa_param_descriptor
, va_gc
> *descs
,
5606 ipcp_transformation
*ts
, bool *sc
)
5607 : dom_walker (CDI_DOMINATORS
), m_fbi (fbi
), m_descriptors (descs
),
5608 m_ts (ts
), m_something_changed (sc
) {}
5610 edge
before_dom_children (basic_block
) final override
;
5612 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup
); }
5615 struct ipa_func_body_info
*m_fbi
;
5616 vec
<ipa_param_descriptor
, va_gc
> *m_descriptors
;
5617 ipcp_transformation
*m_ts
;
5618 bool *m_something_changed
;
5619 auto_bitmap m_need_eh_cleanup
;
5623 ipcp_modif_dom_walker::before_dom_children (basic_block bb
)
5625 gimple_stmt_iterator gsi
;
5626 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5628 gimple
*stmt
= gsi_stmt (gsi
);
5630 HOST_WIDE_INT bit_offset
;
5635 if (!gimple_assign_load_p (stmt
))
5637 rhs
= gimple_assign_rhs1 (stmt
);
5638 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
5643 while (handled_component_p (t
))
5645 /* V_C_E can do things like convert an array of integers to one
5646 bigger integer and similar things we do not handle below. */
5647 if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
5652 t
= TREE_OPERAND (t
, 0);
5657 if (!ipa_load_from_parm_agg (m_fbi
, m_descriptors
, stmt
, rhs
, &index
,
5658 &bit_offset
, &size
, &by_ref
))
5660 unsigned unit_offset
= bit_offset
/ BITS_PER_UNIT
;
5661 ipa_argagg_value_list
avl (m_ts
);
5662 tree v
= avl
.get_value (index
, unit_offset
, by_ref
);
5665 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v
))), size
))
5668 gcc_checking_assert (is_gimple_ip_invariant (v
));
5669 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
)))
5671 if (fold_convertible_p (TREE_TYPE (rhs
), v
))
5672 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
);
5673 else if (TYPE_SIZE (TREE_TYPE (rhs
))
5674 == TYPE_SIZE (TREE_TYPE (v
)))
5675 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
);
5680 fprintf (dump_file
, " const ");
5681 print_generic_expr (dump_file
, v
);
5682 fprintf (dump_file
, " can't be converted to type of ");
5683 print_generic_expr (dump_file
, rhs
);
5684 fprintf (dump_file
, "\n");
5692 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5694 fprintf (dump_file
, "Modifying stmt:\n ");
5695 print_gimple_stmt (dump_file
, stmt
, 0);
5697 gimple_assign_set_rhs_from_tree (&gsi
, val
);
5700 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5702 fprintf (dump_file
, "into:\n ");
5703 print_gimple_stmt (dump_file
, stmt
, 0);
5704 fprintf (dump_file
, "\n");
5707 *m_something_changed
= true;
5708 if (maybe_clean_eh_stmt (stmt
))
5709 bitmap_set_bit (m_need_eh_cleanup
, bb
->index
);
5714 /* Return true if we have recorded VALUE and MASK about PARM.
5715 Set VALUE and MASk accordingly. */
5718 ipcp_get_parm_bits (tree parm
, tree
*value
, widest_int
*mask
)
5720 cgraph_node
*cnode
= cgraph_node::get (current_function_decl
);
5721 ipcp_transformation
*ts
= ipcp_get_transformation_summary (cnode
);
5722 if (!ts
|| vec_safe_length (ts
->bits
) == 0)
5726 for (tree p
= DECL_ARGUMENTS (current_function_decl
);
5727 p
!= parm
; p
= DECL_CHAIN (p
))
5730 /* Ignore static chain. */
5735 clone_info
*cinfo
= clone_info::get (cnode
);
5736 if (cinfo
&& cinfo
->param_adjustments
)
5738 i
= cinfo
->param_adjustments
->get_original_index (i
);
5743 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5746 *mask
= bits
[i
]->mask
;
5747 *value
= wide_int_to_tree (TREE_TYPE (parm
), bits
[i
]->value
);
5752 /* Update bits info of formal parameters as described in
5753 ipcp_transformation. */
5756 ipcp_update_bits (struct cgraph_node
*node
)
5758 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5760 if (!ts
|| vec_safe_length (ts
->bits
) == 0)
5762 vec
<ipa_bits
*, va_gc
> &bits
= *ts
->bits
;
5763 unsigned count
= bits
.length ();
5767 auto_vec
<int, 16> new_indices
;
5768 bool need_remapping
= false;
5769 clone_info
*cinfo
= clone_info::get (node
);
5770 if (cinfo
&& cinfo
->param_adjustments
)
5772 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5773 need_remapping
= true;
5775 auto_vec
<tree
, 16> parm_decls
;
5776 push_function_arg_decls (&parm_decls
, node
->decl
);
5778 for (unsigned i
= 0; i
< count
; ++i
)
5783 if (i
>= new_indices
.length ())
5785 int idx
= new_indices
[i
];
5788 parm
= parm_decls
[idx
];
5791 parm
= parm_decls
[i
];
5792 gcc_checking_assert (parm
);
5796 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm
))
5797 || POINTER_TYPE_P (TREE_TYPE (parm
)))
5798 || !is_gimple_reg (parm
))
5801 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5807 fprintf (dump_file
, "Adjusting mask for param %u to ", i
);
5808 print_hex (bits
[i
]->mask
, dump_file
);
5809 fprintf (dump_file
, "\n");
5812 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5814 unsigned prec
= TYPE_PRECISION (TREE_TYPE (ddef
));
5815 signop sgn
= TYPE_SIGN (TREE_TYPE (ddef
));
5817 wide_int nonzero_bits
= wide_int::from (bits
[i
]->mask
, prec
, UNSIGNED
)
5818 | wide_int::from (bits
[i
]->value
, prec
, sgn
);
5819 set_nonzero_bits (ddef
, nonzero_bits
);
5823 unsigned tem
= bits
[i
]->mask
.to_uhwi ();
5824 unsigned HOST_WIDE_INT bitpos
= bits
[i
]->value
.to_uhwi ();
5825 unsigned align
= tem
& -tem
;
5826 unsigned misalign
= bitpos
& (align
- 1);
5831 fprintf (dump_file
, "Adjusting align: %u, misalign: %u\n", align
, misalign
);
5833 unsigned old_align
, old_misalign
;
5834 struct ptr_info_def
*pi
= get_ptr_info (ddef
);
5835 bool old_known
= get_ptr_info_alignment (pi
, &old_align
, &old_misalign
);
5838 && old_align
> align
)
5842 fprintf (dump_file
, "But alignment was already %u.\n", old_align
);
5843 if ((old_misalign
& (align
- 1)) != misalign
)
5844 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5845 old_misalign
, misalign
);
5851 && ((misalign
& (old_align
- 1)) != old_misalign
)
5853 fprintf (dump_file
, "old_misalign (%u) and misalign (%u) mismatch\n",
5854 old_misalign
, misalign
);
5856 set_ptr_info_alignment (pi
, align
, misalign
);
5863 ipa_vr::nonzero_p (tree expr_type
) const
5865 if (type
== VR_ANTI_RANGE
&& wi::eq_p (min
, 0) && wi::eq_p (max
, 0))
5868 unsigned prec
= TYPE_PRECISION (expr_type
);
5869 return (type
== VR_RANGE
5870 && TYPE_UNSIGNED (expr_type
)
5871 && wi::eq_p (min
, wi::one (prec
))
5872 && wi::eq_p (max
, wi::max_value (prec
, TYPE_SIGN (expr_type
))));
5875 /* Update value range of formal parameters as described in
5876 ipcp_transformation. */
5879 ipcp_update_vr (struct cgraph_node
*node
)
5881 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5882 if (!ts
|| vec_safe_length (ts
->m_vr
) == 0)
5884 const vec
<ipa_vr
, va_gc
> &vr
= *ts
->m_vr
;
5885 unsigned count
= vr
.length ();
5889 auto_vec
<int, 16> new_indices
;
5890 bool need_remapping
= false;
5891 clone_info
*cinfo
= clone_info::get (node
);
5892 if (cinfo
&& cinfo
->param_adjustments
)
5894 cinfo
->param_adjustments
->get_updated_indices (&new_indices
);
5895 need_remapping
= true;
5897 auto_vec
<tree
, 16> parm_decls
;
5898 push_function_arg_decls (&parm_decls
, node
->decl
);
5900 for (unsigned i
= 0; i
< count
; ++i
)
5906 if (i
>= new_indices
.length ())
5908 remapped_idx
= new_indices
[i
];
5909 if (remapped_idx
< 0)
5915 parm
= parm_decls
[remapped_idx
];
5917 gcc_checking_assert (parm
);
5918 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
), parm
);
5920 if (!ddef
|| !is_gimple_reg (parm
))
5924 && (vr
[i
].type
== VR_RANGE
|| vr
[i
].type
== VR_ANTI_RANGE
))
5926 tree type
= TREE_TYPE (ddef
);
5927 unsigned prec
= TYPE_PRECISION (type
);
5928 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef
)))
5932 fprintf (dump_file
, "Setting value range of param %u "
5933 "(now %i) ", i
, remapped_idx
);
5934 fprintf (dump_file
, "%s[",
5935 (vr
[i
].type
== VR_ANTI_RANGE
) ? "~" : "");
5936 print_decs (vr
[i
].min
, dump_file
);
5937 fprintf (dump_file
, ", ");
5938 print_decs (vr
[i
].max
, dump_file
);
5939 fprintf (dump_file
, "]\n");
5941 value_range
v (type
,
5942 wide_int_storage::from (vr
[i
].min
, prec
,
5944 wide_int_storage::from (vr
[i
].max
, prec
,
5947 set_range_info (ddef
, v
);
5949 else if (POINTER_TYPE_P (TREE_TYPE (ddef
))
5950 && vr
[i
].nonzero_p (TREE_TYPE (ddef
)))
5953 fprintf (dump_file
, "Setting nonnull for %u\n", i
);
5954 set_ptr_nonnull (ddef
);
5960 /* IPCP transformation phase doing propagation of aggregate values. */
5963 ipcp_transform_function (struct cgraph_node
*node
)
5965 vec
<ipa_param_descriptor
, va_gc
> *descriptors
= NULL
;
5966 struct ipa_func_body_info fbi
;
5969 gcc_checking_assert (cfun
);
5970 gcc_checking_assert (current_function_decl
);
5973 fprintf (dump_file
, "Modification phase of node %s\n",
5974 node
->dump_name ());
5976 ipcp_update_bits (node
);
5977 ipcp_update_vr (node
);
5978 ipcp_transformation
*ts
= ipcp_get_transformation_summary (node
);
5979 if (!ts
|| vec_safe_is_empty (ts
->m_agg_values
))
5981 param_count
= count_formal_params (node
->decl
);
5982 if (param_count
== 0)
5984 vec_safe_grow_cleared (descriptors
, param_count
, true);
5985 ipa_populate_param_decls (node
, *descriptors
);
5987 bool cfg_changed
= adjust_agg_replacement_values (node
, ts
, *descriptors
);
5988 if (vec_safe_is_empty (ts
->m_agg_values
))
5990 vec_free (descriptors
);
5992 fprintf (dump_file
, " All affected aggregate parameters were either "
5993 "removed or converted into scalars, phase done.\n");
5995 delete_unreachable_blocks_update_callgraph (node
, false);
6000 fprintf (dump_file
, " Aggregate replacements:");
6001 ipa_argagg_value_list
avs (ts
);
6002 avs
.dump (dump_file
);
6007 fbi
.bb_infos
= vNULL
;
6008 fbi
.bb_infos
.safe_grow_cleared (last_basic_block_for_fn (cfun
), true);
6009 fbi
.param_count
= param_count
;
6010 fbi
.aa_walk_budget
= opt_for_fn (node
->decl
, param_ipa_max_aa_steps
);
6012 bool modified_mem_access
= false;
6013 calculate_dominance_info (CDI_DOMINATORS
);
6014 ipcp_modif_dom_walker
walker (&fbi
, descriptors
, ts
, &modified_mem_access
);
6015 walker
.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6016 free_dominance_info (CDI_DOMINATORS
);
6017 cfg_changed
|= walker
.cleanup_eh ();
6020 struct ipa_bb_info
*bi
;
6021 FOR_EACH_VEC_ELT (fbi
.bb_infos
, i
, bi
)
6022 free_ipa_bb_info (bi
);
6023 fbi
.bb_infos
.release ();
6025 ipcp_transformation
*s
= ipcp_transformation_sum
->get (node
);
6026 s
->m_agg_values
= NULL
;
6030 vec_free (descriptors
);
6032 delete_unreachable_blocks_update_callgraph (node
, false);
6034 return modified_mem_access
? TODO_update_ssa_only_virtuals
: 0;
6038 #include "gt-ipa-prop.h"