/* Interprocedural analyses.
   Copyright (C) 2005-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
30 #include "tree-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-iterator.h"
35 #include "gimple-fold.h"
38 #include "stor-layout.h"
39 #include "print-tree.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
54 #include "tree-cfgcleanup.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }

  static const bool empty_zero_p = true;

  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }

  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }

  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }

  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
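
/* Illustrative sketch only (the real lookup lives in
   ipa_get_ipa_bits_for_value further below): equal value/mask pairs are
   shared by probing the table with a temporary key, e.g.

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);

   so that jump functions and transformation summaries can compare ipa_bits
   by pointer.  */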
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  static hashval_t
  hash (const value_range *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }

  static bool
  equal (const value_range *a, const value_range *b)
    {
      return (types_compatible_p (a->type (), b->type ())
	      && *a == *b);
    }

  static const bool empty_zero_p = true;

  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }

  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }

  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }

  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get_create (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
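
/* A hypothetical driver loop (illustration only; the actual IPA passes wire
   this up elsewhere) that would prepare the descriptors for every defined
   function before jump functions are computed:

     cgraph_node *node;
     FOR_EACH_DEFINED_FUNCTION (node)
       if (node->has_gimple_body_p ())
	 ipa_initialize_node_params (node);  */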
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  int count = ipa_get_cs_argument_count (args);

  for (int i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (args, i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.keep_null)
	    fprintf (f, ", keep_null");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (args, i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function that describes a value about which we
   really know nothing.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
	  || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
	      && TREE_STATIC (TREE_OPERAND (constant, 0)))))
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved, bool keep_null)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.keep_null = keep_null;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
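
/* For illustration (a hypothetical C++ example, not from GCC sources), in

     struct A { virtual void f (); };
     struct B : A { B (); void f () override; };

     B::B ()
     {
       // 1) A::A () runs first and stores A's VMT pointer.
       // 2) B's VMT pointer is then stored over it.
       // 3) Only afterwards does user-written constructor code run.
     }

   the reasoning above relies precisely on this ordering of VMT stores.  */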
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also, as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  if (fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget);
  if (walked >= 0)
    fbi->aa_walk_budget -= walked;
  else
    fbi->aa_walk_budget = 0;

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified || fbi->aa_walk_budget == 0)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified || fbi->aa_walk_budget == 0)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  /* We cannot propagate across volatile loads.  */
  if (TREE_THIS_VOLATILE (op))
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
	*/

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
   to find the original pointer.  Initialize RET to the pointer which results
   from the walk.
   If the offset is known, return true and initialize OFFSET_RET.  */

bool
unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
{
  poly_int64 offset = 0;
  bool offset_known = true;
  int i;

  for (i = 0; i < param_ipa_jump_function_lookups; i++)
    {
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  poly_int64 extra_offset = 0;
	  tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
						     &extra_offset);
	  if (!base)
	    {
	      base = get_base_address (TREE_OPERAND (op, 0));
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset_known = false;
	    }
	  else
	    {
	      if (TREE_CODE (base) != MEM_REF)
		break;
	      offset += extra_offset;
	    }
	  op = TREE_OPERAND (base, 0);
	  if (mem_ref_offset (base).to_shwi (&extra_offset))
	    offset += extra_offset;
	  else
	    offset_known = false;
	}
      else if (TREE_CODE (op) == SSA_NAME
	       && !SSA_NAME_IS_DEFAULT_DEF (op))
	{
	  gimple *pstmt = SSA_NAME_DEF_STMT (op);

	  if (gimple_assign_single_p (pstmt))
	    op = gimple_assign_rhs1 (pstmt);
	  else if (is_gimple_assign (pstmt)
		   && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
	    {
	      poly_int64 extra_offset = 0;
	      if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
				  &extra_offset))
		offset += extra_offset;
	      else
		offset_known = false;
	      op = gimple_assign_rhs1 (pstmt);
	    }
	  else
	    break;
	}
      else
	break;
    }
  *ret = op;
  *offset_ret = offset;
  return offset_known;
}
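
/* Worked example (illustrative GIMPLE, not from GCC sources): given

     p2_4 = p_1(D) + 4;
     _5 = &MEM[(char *)p2_4 + 8];

   starting from _5 the walk strips the ADDR_EXPR/MEM_REF (adding 8) and the
   POINTER_PLUS_EXPR (adding 4), sets *RET to p_1(D) and *OFFSET_RET to 12,
   and returns true because every step had a constant offset.  */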
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa),
			 false);
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm),
		       true);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
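
/* For reference (a hypothetical C++ illustration, not from this file): the
   matched layout corresponds to how the Itanium C++ ABI commonly represents

     struct A;
     void (A::*pmf) ();

   i.e. a record whose first field is a pointer to a METHOD_TYPE and whose
   second field is an integral this-adjustment delta.  */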
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
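
/* Illustrative example (hypothetical GIMPLE): for a copy chain such as

     b_2 = a_1;
     c_3 = b_2;

   calling this function on c_3 returns a_1 (a default definition, so the
   walk stops there) and leaves *RHS_STMT pointing at "b_2 = a_1".  */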
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
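
/* Worked example (illustrative only): inserting items with offsets 8, 0 and
   4 in that order yields the list 0 -> 4 -> 8, which is what lets the
   clobber check below scan it in a single ascending pass.  */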
/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_safe_reserve (jfunc->agg.items, value_count, true);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  if (gphi *phi = dyn_cast<gphi *> (stmt))
    {
      /* Also special case like the following (a is a formal parameter):

	   _12 = *a_11(D).dim[0].stride;
	   ...
	   # iftmp.22_9 = PHI <_12(2), 1(3)>
	   ...
	   parm.6.dim[0].stride = iftmp.22_9;
	   ...
	   __x_MOD_foo (&parm.6, b_31(D));

	 The aggregate function describing parm.6.dim[0].stride is encoded as a
	 PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
	 (the constant from the PHI node).  */

      if (gimple_phi_num_args (phi) != 2)
	return;
      tree arg0 = gimple_phi_arg_def (phi, 0);
      tree arg1 = gimple_phi_arg_def (phi, 1);
      tree operand;

      if (is_gimple_ip_invariant (arg1))
	{
	  operand = arg1;
	  rhs1 = arg0;
	}
      else if (is_gimple_ip_invariant (arg0))
	{
	  operand = arg0;
	  rhs1 = arg1;
	}
      else
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      if (!is_gimple_assign (stmt))
	return;

      code = ASSERT_EXPR;
      agg_value->pass_through.operand = operand;
    }
  else if (is_gimple_assign (stmt))
    {
      code = gimple_assign_rhs_code (stmt);
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_SINGLE_RHS:
	  if (is_gimple_ip_invariant (rhs1))
	    {
	      agg_value->pass_through.operand = rhs1;
	      return;
	    }
	  code = NOP_EXPR;
	  break;

	case GIMPLE_UNARY_RHS:
	  /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	     (truth_not_expr is an example), and GIMPLE_BINARY_RHS does not
	     imply tcc_binary, this subtleness is somewhat misleading.

	     Since tcc_unary is widely used in IPA-CP code to check an operation
	     with one operand, here we only allow tcc_unary operations to avoid
	     possible problems.  Then we can use (opclass == tcc_unary) or not
	     to distinguish unary and binary.  */
	  if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	    return;

	  rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
	  break;

	case GIMPLE_BINARY_RHS:
	  {
	    gimple *rhs1_stmt = stmt;
	    gimple *rhs2_stmt = stmt;
	    tree rhs2 = gimple_assign_rhs2 (stmt);

	    rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	    rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	    if (is_gimple_ip_invariant (rhs2))
	      {
		agg_value->pass_through.operand = rhs2;
		stmt = rhs1_stmt;
	      }
	    else if (is_gimple_ip_invariant (rhs1))
	      {
		if (TREE_CODE_CLASS (code) == tcc_comparison)
		  code = swap_tree_comparison (code);
		else if (!commutative_tree_code (code))
		  return;

		agg_value->pass_through.operand = rhs1;
		stmt = rhs2_stmt;
		rhs1 = rhs2;
	      }
	    else
	      return;

	    if (TREE_CODE_CLASS (code) != tcc_comparison
		&& !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (rhs1)))
	      return;
	  }
	  break;

	default:
	  return;
	}
    }
  else
    return;

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constants or values that are derived from caller's
   formal parameter in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call);
       dom_vuse && fbi->aa_walk_budget > 0;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      fbi->aa_walk_budget--;
      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}
      dom_vuse = gimple_vuse (stmt);
    }

  if (visited)
    BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
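
/* Illustrative example (hypothetical caller, not from GCC sources): for

     struct S s;
     s.a = 5;
     s.b = p_1(D);   // p is the caller's own formal parameter
     foo (&s);

   the backward walk over the virtual SSA web records s.a as an IPA_JF_CONST
   item and s.b as an IPA_JF_PASS_THROUGH item in the aggregate jump function
   built for the argument of foo.  */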
/* Return the Ith param type of callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
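
/* Example use (hypothetical values): two jump functions recording that a
   value is known to be a multiple of 4 can share one structure, e.g.

     ipa_bits *b = ipa_get_ipa_bits_for_value (0, -4);

   A second call with the same value/mask pair returns the same pointer,
   which is what keeps streamed summaries compact.  */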
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = new (ggc_alloc<value_range> ()) value_range;
  *vr = *tmp;
  *slot = vr;

  return vr;
}
2206 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2207 equiv set. Use hash table in order to avoid creating multiple same copies of
2210 static value_range
*
2211 ipa_get_value_range (enum value_range_kind kind
, tree min
, tree max
)
2213 value_range
tmp (min
, max
, kind
);
2214 return ipa_get_value_range (&tmp
);
2217 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2218 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2219 same value_range structures. */
2222 ipa_set_jfunc_vr (ipa_jump_func
*jf
, enum value_range_kind type
,
2225 jf
->m_vr
= ipa_get_value_range (type
, min
, max
);
2228 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2229 copy from ipa_vr_hash_table or allocate a new on in GC memory. */
2232 ipa_set_jfunc_vr (ipa_jump_func
*jf
, value_range
*tmp
)
2234 jf
->m_vr
= ipa_get_value_range (tmp
);
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
  ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      value_range vr;
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && vr.nonzero_p ())
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      /* Limit the ranger query to integral types as the rest
		 of this file uses value_range's, which only hold
		 integers and pointers.  */
	      && irange::supports_p (TREE_TYPE (arg))
	      && get_range_query (cfun)->range_of_expr (vr, arg)
	      && !vr.undefined_p ())
	    {
	      value_range resvr;
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &vr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may better work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
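
/* Illustrative arithmetic (not part of the original file): in the pointer
   branch above, get_pointer_alignment_1 returning align == 64 (bits) and
   bitpos == 0 for a 64-bit pointer yields

     mask  = (all ones in 64 bits) & ~(64 / BITS_PER_UNIT - 1) = ~7,
     value = 0 / BITS_PER_UNIT = 0,

   i.e. the three low bits are recorded as known zero and the argument is
   known to be 8-byte aligned.  In ipa_bits a set mask bit means the bit is
   unknown; clear mask bits take their value from VALUE.  */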
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto
	      && !gimple_call_fnspec (cs->call_stmt).known_p ())
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
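
/* Illustrative note (not part of the original file): the member pointer
   record this matches against has the layout the C++ ABI uses for pointers
   to member functions, conceptually

     struct ptrmemfunc
     {
       void *__pfn;	   /* Function address or vtable offset.  */
       ptrdiff_t __delta;  /* This-pointer adjustment.  */
     };

   so a load of REC->__pfn (or of REC->__delta when USE_DELTA is set) from a
   PARM_DECL REC is what the code above recognizes.  Field names here follow
   the Itanium C++ ABI convention; the exact record is target-defined.  */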
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic
   call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  ipa_set_param_used_by_indirect_call (info, param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call (info, param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or

     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>:
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load, visit_store
   and visit_addr parameters.  If OP is a parameter declaration, mark it as
   used in the info structure passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Return true if EXPR is a load from a dereference of SSA_NAME NAME.  */

static bool
load_from_dereferenced_name (tree expr, tree name)
{
  tree base = get_base_address (expr);
  return (TREE_CODE (base) == MEM_REF
	  && TREE_OPERAND (base, 0) == name);
}
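
/* Illustrative note (not part of the original file): with _1 being NAME, a
   gimple statement like

     x_2 = MEM[(int *)_1];

   satisfies load_from_dereferenced_name, whereas a load whose base is a
   different SSA name, or a plain copy  x_2 = _1;,  does not.  */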
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  ipa_node_params *info = ipa_node_params_sum->get (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int call_uses = 0;
      bool load_dereferenced = false;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      gimple *stmt;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
		{
		  if (is_gimple_debug (stmt))
		    continue;

		  int all_stmt_uses = 0;
		  use_operand_p use_p;
		  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
		    all_stmt_uses++;

		  if (is_gimple_call (stmt))
		    {
		      if (gimple_call_internal_p (stmt))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      int recognized_stmt_uses;
		      if (gimple_call_fn (stmt) == ddef)
			recognized_stmt_uses = 1;
		      else
			recognized_stmt_uses = 0;
		      unsigned arg_count = gimple_call_num_args (stmt);
		      for (unsigned i = 0; i < arg_count; i++)
			{
			  tree arg = gimple_call_arg (stmt, i);
			  if (arg == ddef)
			    recognized_stmt_uses++;
			  else if (load_from_dereferenced_name (arg, ddef))
			    {
			      load_dereferenced = true;
			      recognized_stmt_uses++;
			    }
			}

		      if (recognized_stmt_uses != all_stmt_uses)
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      if (call_uses >= 0)
			call_uses += all_stmt_uses;
		    }
		  else if (gimple_assign_single_p (stmt))
		    {
		      tree rhs = gimple_assign_rhs1 (stmt);
		      if (all_stmt_uses != 1
			  || !load_from_dereferenced_name (rhs, ddef))
			{
			  call_uses = IPA_UNDESCRIBED_USE;
			  break;
			}
		      load_dereferenced = true;
		    }
		  else
		    {
		      call_uses = IPA_UNDESCRIBED_USE;
		      break;
		    }
		}
	    }
	}
      else
	call_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, call_uses);
      ipa_set_param_load_dereferenced (info, i, load_dereferenced);
    }
}
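
/* Illustrative note (not part of the original file): for

     static void foo (void (*cb) (int))
     {
       cb (1);
       cb (2);
     }

   the default def of CB has exactly two uses, both as the called function,
   so its controlled-uses count is 2.  If CB also escaped - stored to memory
   or passed as a plain argument - the count would degrade to
   IPA_UNDESCRIBED_USE.  */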
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  edge before_dom_children (basic_block) final override;

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = ipa_node_params_sum->get_create (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = info;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  ipa_edge_args *top = ipa_edge_args_sum->get (cs);
  ipa_edge_args *args = ipa_edge_args_sum->get (e);
  if (!args)
    return;
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
	    {
	      int dst_fid;
	      struct ipa_jump_func *src;

	      if (item->jftype != IPA_JF_PASS_THROUGH
		  && item->jftype != IPA_JF_LOAD_AGG)
		continue;

	      dst_fid = item->value.pass_through.formal_id;
	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
		{
		  item->jftype = IPA_JF_UNKNOWN;
		  continue;
		}

	      item->value.pass_through.formal_id = -1;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      if (src->type == IPA_JF_CONST)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      && item->value.pass_through.operation == NOP_EXPR)
		    {
		      item->jftype = IPA_JF_CONST;
		      item->value.constant = src->value.constant.value;
		      continue;
		    }
		}
	      else if (src->type == IPA_JF_PASS_THROUGH
		       && src->value.pass_through.operation == NOP_EXPR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      || !item->value.load_agg.by_ref
		      || src->value.pass_through.agg_preserved)
		    item->value.pass_through.formal_id
		      = src->value.pass_through.formal_id;
		}
	      else if (src->type == IPA_JF_ANCESTOR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH)
		    {
		      if (!src->value.ancestor.offset)
			item->value.pass_through.formal_id
			  = src->value.ancestor.formal_id;
		    }
		  else if (src->value.ancestor.agg_preserved)
		    {
		      gcc_checking_assert (item->value.load_agg.by_ref);

		      item->value.pass_through.formal_id
			= src->value.ancestor.formal_id;
		      item->value.load_agg.offset
			+= src->value.ancestor.offset;
		    }
		}

	      if (item->value.pass_through.formal_id < 0)
		item->jftype = IPA_JF_UNKNOWN;
	    }
	}

      if (!top)
	{
	  ipa_set_jf_unknown (dst);
	  continue;
	}

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  class ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      class ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count, true);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Parameter and argument in ancestor jump function must be pointer
	     type, which means access to aggregate must be by-reference.  */
	  gcc_assert (!src->agg.items || src->agg.by_ref);

	  if (src->agg.items && dst->value.ancestor.agg_preserved)
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	      dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (top && dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      class ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  class ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count, true);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id,
						     operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p,
					 ipa_get_jf_ancestor_keep_null (src));
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
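
/* Illustrative note (not part of the original file): composing two ancestor
   jump functions adds their offsets.  If the inlined call passed
   &parm->field_a (ancestor, offset 64) and the retained edge E passes
   &arg->field_b (ancestor, offset 32), the updated jump function for E
   describes an ancestor at offset 64 + 32 = 96 bits from the original
   parameter, with agg_preserved only if both component functions preserved
   the aggregate.  */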
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_unreachable ();
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the
     unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We had better be able to refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
	{
	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
	  if (!e2)
	    fprintf (dump_file, "ipa-prop: Discovered call to a "
		     "speculative target (%s -> %s) but the call is "
		     "already speculated to different target.  "
		     "Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	  else
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s) this agrees with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor,
				     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }

  return NULL;
}
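
/* Illustrative note (not part of the original file): given

     struct S { int a; int b[2]; };
     static const struct S s = { 1, { 2, 3 } };

   a query for bit offset 64 (assuming a 32-bit int) finds field b at offset
   32, recurses into its constructor with 64 - 32 = 32 and returns the
   element 3, while a query landing inside a bit-field or past the last
   initialized element returns NULL.  */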
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from AGG, a set of known offset/value for an aggregate or
   static initializer of SCALAR (which can be NULL) for the given OFFSET or
   return NULL if there is none.  BY_REF specifies whether the value has to be
   passed by reference or by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then
   the boolean it points to is set to true if the value comes from an
   initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_value *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol,
			    struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   or variable declaration, return the associated symtab node.  Otherwise
   return NULL.  */

static symtab_node *
symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
	  && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
    return NULL;

  return symtab_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = symtab_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      target = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      agg.release ();
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !symtab_node_for_jfunc (jfunc)
			       || cs->callee == symtab_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_unreachable ();
  cgraph_node::get_create (new_target);
  return new_target;
}
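
/* Illustrative note (not part of the original file): when type analysis
   proves that a virtual call site can have no valid target, e.g. when its
   possible-target set is empty or type-inconsistent, redirecting the call to
   __builtin_unreachable both records the fact and lets code dominated by the
   call be optimized away.  */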
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = ipa_edge_args_sum->get (cs);
  new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  new_root_info = ipa_node_params_sum->get (new_root);
  inlined_node_info
    = ipa_node_params_sum->get (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of
	 arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      auto_vec<cgraph_node *, 4> spec_targets;
      if (ie->speculative)
	for (cgraph_edge *direct = ie->first_speculative_call_target ();
	     direct;
	     direct = direct->next_speculative_call_target ())
	  spec_targets.safe_push (direct->callee);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && spec_targets.contains (new_direct_edge->callee))
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
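
/* Illustrative arithmetic (not part of the original file): if the caller
   knew of 2 controlled uses of a constant and the inlined callee body
   contributes 3 more, the combined count is 2 + 3 = 5; as soon as either
   side is IPA_UNDESCRIBED_USE the combination is IPA_UNDESCRIBED_USE as
   well, i.e. the counter saturates at "unknown".  */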
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
  ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  bool lderef = true;
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      lderef = (ipa_get_param_load_dereferenced (new_root_info,
							 src_idx)
			|| ipa_get_param_load_dereferenced (old_root_info, i));
	      ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
	    }

	  if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  tree cst = ipa_get_jf_constant (jf);
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount != IPA_UNDESCRIBED_USE
	      && ipa_get_param_load_dereferenced (old_root_info, i)
	      && TREE_CODE (cst) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (cst, 0)) == VAR_DECL)
	    {
	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      new_root->create_reference (n, IPA_REF_LOAD, NULL);
	      if (dump_file)
		fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
			 "a load so adding LOAD reference from %s to %s.\n",
			 new_root->dump_name (), n->dump_name ());
	    }
	  if (rdesc->refcount == 0)
	    {
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && ((TREE_CODE (TREE_OPERAND (cst, 0))
					== FUNCTION_DECL)
				       || (TREE_CODE (TREE_OPERAND (cst, 0))
					   == VAR_DECL)));

	      symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  remove_described_reference (n, rdesc);
		  cgraph_node *clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  ipa_edge_args *args = ipa_edge_args_sum->get (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
         ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  if (ipa_node_params_sum)
    ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    {
      ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
      ipcp_transformation_sum->disable_insertion_hook ();
    }
}
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
/* Hook that is called by cgraph.cc when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
        {
          struct ipa_cst_ref_desc *rdesc;
          try_decrement_rdesc_refcount (jf);
          if (jf->type == IPA_JF_CONST
              && (rdesc = ipa_get_jf_constant_rdesc (jf))
              && rdesc->cs == cs)
            rdesc->cs = NULL;
        }
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
                                ipa_edge_args *old_args,
                                ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

          if (!src_rdesc)
            dst_jf->value.constant.rdesc = NULL;
          else if (src->caller == dst->caller)
            {
              /* Creation of a speculative edge.  If the source edge is the one
                 grabbing a reference, we must create a new (duplicate)
                 reference description.  Otherwise they refer to the same
                 description corresponding to a reference taken in a function
                 src->caller is inlined to.  In that case we just must
                 increment the refcount.  */
              if (src_rdesc->cs == src)
                {
                  symtab_node *n = symtab_node_for_jfunc (src_jf);
                  gcc_checking_assert (n);
                  ipa_ref *ref
                    = src->caller->find_reference (n, src->call_stmt,
                                                   src->lto_stmt_uid);
                  gcc_checking_assert (ref);
                  dst->caller->clone_reference (ref, ref->stmt);

                  ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
                  dst_rdesc->cs = dst;
                  dst_rdesc->refcount = src_rdesc->refcount;
                  dst_rdesc->next_duplicate = NULL;
                  dst_jf->value.constant.rdesc = dst_rdesc;
                }
              else
                {
                  src_rdesc->refcount++;
                  dst_jf->value.constant.rdesc = src_rdesc;
                }
            }
          else if (src_rdesc->cs == src)
            {
              struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
              src_rdesc->next_duplicate = dst_rdesc;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              /* This can happen during inlining, when a JFUNC can refer to a
                 reference taken in a function up in the tree of inline clones.
                 We need to find the duplicate that refers to our tree of
                 inline clones.  */

              gcc_assert (dst->caller->inlined_to);
              for (dst_rdesc = src_rdesc->next_duplicate;
                   dst_rdesc;
                   dst_rdesc = dst_rdesc->next_duplicate)
                {
                  struct cgraph_node *top;
                  top = dst_rdesc->cs->caller->inlined_to
                    ? dst_rdesc->cs->caller->inlined_to
                    : dst_rdesc->cs->caller;
                  if (dst->caller->inlined_to == top)
                    break;
                }
              gcc_assert (dst_rdesc);
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
               && src->caller == dst->caller)
        {
          struct cgraph_node *inline_root = dst->caller->inlined_to
            ? dst->caller->inlined_to : dst->caller;
          ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
          int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

          int c = ipa_get_controlled_uses (root_info, idx);
          if (c != IPA_UNDESCRIBED_USE)
            {
              c++;
              ipa_set_controlled_uses (root_info, idx, c);
            }
        }
    }
}
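/* To recap the rdesc handling above: when duplicating for a speculative edge
   (src->caller == dst->caller), the new edge either clones the underlying
   symtab reference and gets a fresh rdesc (if the source edge owned the
   reference) or shares the existing rdesc with an incremented refcount.  When
   cloning the reference-owning edge into a different caller, a fresh rdesc is
   chained into the next_duplicate list.  In all remaining cases the jump
   function refers to a reference taken higher up in the tree of inline
   clones, and the matching duplicate is looked up rather than allocated.  */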
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
                             ipa_node_params *old_info,
                             ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
        {
          struct ipa_agg_replacement_value *v;

          v = ggc_alloc<ipa_agg_replacement_value> ();
          memcpy (v, old_av, sizeof (*v));
          v->next = new_av;
          new_av = v;
          old_av = old_av->next;
        }
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
                                 ipcp_transformation *src_trans,
                                 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  ipa_agg_replacement_value *agg = src_trans->agg_values,
    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  if (function_insertion_hook_holder)
    symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of the function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = ipa_node_params_sum->get (node);
  fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
        fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
        fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
        fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, " controlled_uses=%i %s", c,
                 ipa_get_param_load_dereferenced (info, i)
                 ? "(load_dereferenced)" : "");
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, " Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
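/* The streamed representation of a jump function, as written by
   ipa_write_jump_function below and parsed back by ipa_read_jump_function,
   is roughly: one uhwi holding type * 2 + flag, where the flag marks
   IPA_JF_CONST values that are ADDR_EXPRs (streamed as their operand only);
   a type-specific payload; a counted list of aggregate items; and finally
   bit-packed known-bits and value range information.  */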
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
        EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
                         flag
                         ? TREE_OPERAND (jump_func->value.constant.value, 0)
                         : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
               == tcc_unary)
        streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
        {
        case IPA_JF_UNKNOWN:
          break;
        case IPA_JF_CONST:
          stream_write_tree (ob, item->value.constant, true);
          break;
        case IPA_JF_PASS_THROUGH:
        case IPA_JF_LOAD_AGG:
          streamer_write_uhwi (ob, item->value.pass_through.operation);
          streamer_write_uhwi (ob, item->value.pass_through.formal_id);
          if (TREE_CODE_CLASS (item->value.pass_through.operation)
              != tcc_unary)
            stream_write_tree (ob, item->value.pass_through.operand, true);
          if (item->jftype == IPA_JF_LOAD_AGG)
            {
              stream_write_tree (ob, item->value.load_agg.type, true);
              streamer_write_uhwi (ob, item->value.load_agg.offset);
              bp = bitpack_create (ob->main_stream);
              bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
              streamer_write_bitpack (&bp);
            }
          break;
        default:
          fatal_error (UNKNOWN_LOCATION,
                       "invalid jump function in LTO stream");
        }
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
                           VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        class data_in *data_in,
                        bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
        tree t = stream_read_tree (ib, data_in);
        if (flag && prevails)
          t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
        ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
        }
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
        {
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        bool keep_null = bp_unpack_value (&bp, 1);
        ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
                             keep_null);
        break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    {
      jump_func->agg.items = NULL;
      vec_safe_reserve (jump_func->agg.items, count, true);
    }
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
        {
        case IPA_JF_UNKNOWN:
          break;
        case IPA_JF_CONST:
          item.value.constant = stream_read_tree (ib, data_in);
          break;
        case IPA_JF_PASS_THROUGH:
        case IPA_JF_LOAD_AGG:
          operation = (enum tree_code) streamer_read_uhwi (ib);
          item.value.pass_through.operation = operation;
          item.value.pass_through.formal_id = streamer_read_uhwi (ib);
          if (TREE_CODE_CLASS (operation) == tcc_unary)
            item.value.pass_through.operand = NULL_TREE;
          else
            item.value.pass_through.operand = stream_read_tree (ib, data_in);
          if (item.jftype == IPA_JF_LOAD_AGG)
            {
              struct bitpack_d bp;
              item.value.load_agg.type = stream_read_tree (ib, data_in);
              item.value.load_agg.offset = streamer_read_uhwi (ib);
              bp = streamer_read_bitpack (ib);
              item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
            }
          break;
        default:
          fatal_error (UNKNOWN_LOCATION,
                       "invalid jump function in LTO stream");
        }
      if (prevails)
        jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
        ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
                                                       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
        ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
                             class data_in *data_in,
                             struct cgraph_edge *cs,
                             class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
        ipa_set_param_used_by_polymorphic_call (info,
                                                ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
                                           ii->param_index, true);
    }
}
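/* The per-node record produced by ipa_write_node_info below therefore
   consists of the symtab encoder reference of the node, the parameter count,
   per-parameter move costs, a bitpack of load-dereferenced and used flags,
   per-parameter controlled-use counts and types, and finally one record for
   each direct and then each indirect call edge, the latter each followed by
   the indirect call info streamed above.  */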
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  ipa_node_params *info = ipa_node_params_sum->get (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      /* TODO: We could just not stream the bit in the undescribed case. */
      bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
        ? ipa_get_param_load_dereferenced (info, j) : true;
      bp_pack_value (&bp, d, 1);
      bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
    }
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);

      if (!args)
        {
          streamer_write_uhwi (ob, 0);
          continue;
        }

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_edge_args *args = ipa_edge_args_sum->get (e);
      if (!args)
        streamer_write_uhwi (ob, 0);
      else
        {
          streamer_write_uhwi (ob,
                               ipa_get_cs_argument_count (args) * 2
                               + (args->polymorphic_call_contexts != NULL));
          for (j = 0; j < ipa_get_cs_argument_count (args); j++)
            {
              ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
              if (args->polymorphic_call_contexts != NULL)
                ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
            }
        }
      ipa_write_indirect_edge_info (ob, e);
    }
}
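/* Note that each edge record above begins with a single uhwi equal to
   ipa_get_cs_argument_count (args) * 2 plus a flag saying whether
   polymorphic call contexts are present, so the reader below can recover
   both facts at once: bit zero selects the contexts, the remaining bits
   give the argument count.  */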
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
                    class data_in *data_in,
                    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails
      && (e->possibly_call_in_translation_unit_p ()
          /* Also stream in jump functions to builtins in hope that they
             will get fnspecs.  */
          || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
    {
      ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
        vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
        {
          ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                  data_in, prevails);
          if (contexts_computed)
            ipa_get_ith_polymorhic_call_context
              (args, k)->stream_in (ib, data_in);
        }
    }
  else
    {
      for (int k = 0; k < count; k++)
        {
          struct ipa_jump_func dummy;
          ipa_read_jump_function (ib, &dummy, e,
                                  data_in, prevails);
          if (contexts_computed)
            {
              class ipa_polymorphic_call_context ctx;
              ctx.stream_in (ib, data_in);
            }
        }
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
                    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  ipa_node_params *info
    = prevails ? ipa_node_params_sum->get_create (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
        (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
        info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool load_dereferenced = bp_unpack_value (&bp, 1);
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
        {
          ipa_set_param_load_dereferenced (info, k, load_dereferenced);
          ipa_set_param_used (info, k, used);
        }
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
        {
          ipa_set_controlled_uses (info, k, nuses);
          (*info->descriptors)[k].decl_or_type = type;
        }
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_node_params_sum->get (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      cgraph_node *node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && ipa_node_params_sum->get (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
                                        &len);
      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}
/* Stream out the IPA-CP transformation summary of NODE, i.e. its aggregate
   replacement chain, value ranges and known bits, to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
        {
          struct bitpack_d bp;
          ipa_vr *parm_vr = &(*ts->m_vr)[i];
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, parm_vr->known, 1);
          streamer_write_bitpack (&bp);
          if (parm_vr->known)
            {
              streamer_write_enum (ob->main_stream, value_rang_type,
                                   VR_LAST, parm_vr->type);
              streamer_write_wide_int (ob, parm_vr->min);
              streamer_write_wide_int (ob, parm_vr->max);
            }
        }
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
        {
          const ipa_bits *bits_jfunc = (*ts->bits)[i];
          struct bitpack_d bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, !!bits_jfunc, 1);
          streamer_write_bitpack (&bp);
          if (bits_jfunc)
            {
              streamer_write_widest_int (ob, bits_jfunc->value);
              streamer_write_widest_int (ob, bits_jfunc->mask);
            }
        }
    }
  else
    streamer_write_uhwi (ob, 0);
}
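/* The layout written above is mirrored by read_ipcp_transformation_info
   below: a node reference, a counted chain of aggregate replacement values
   (offset, index, value tree and a by_ref bit each), a counted vector of
   parameter value ranges and a counted vector of known-bits entries, with a
   zero count standing in for each missing kind of summary.  */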
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
                               data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
        {
          ipa_vr *parm_vr;
          parm_vr = &(*ts->m_vr)[i];
          struct bitpack_d bp;
          bp = streamer_read_bitpack (ib);
          parm_vr->known = bp_unpack_value (&bp, 1);
          if (parm_vr->known)
            {
              parm_vr->type = streamer_read_enum (ib, value_range_kind,
                                                  VR_LAST);
              parm_vr->min = streamer_read_wide_int (ib);
              parm_vr->max = streamer_read_wide_int (ib);
            }
        }
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
        {
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool known = bp_unpack_value (&bp, 1);
          if (known)
            {
              const widest_int value = streamer_read_widest_int (ib);
              const widest_int mask = streamer_read_widest_int (ib);
              ipa_bits *bits
                = ipa_get_ipa_bits_for_value (value, mask);
              (*ts->bits)[i] = bits;
            }
        }
    }
}
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
        write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
                                        &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE but also if any parameter was IPA-SRAed into a scalar go ahead with
   substitution of the default_definitions of that new param with the
   appropriate constant.

   Return two bools.  The first is true if at least one item in AGGVAL still
   exists and the function body walk should go ahead.  The second is true if
   any values were already substituted for scalarized parameters and
   update_cfg should be run after replace_uses_by.  */

static std::pair<bool, bool>
adjust_agg_replacement_values (cgraph_node *node,
                               ipa_agg_replacement_value *aggval,
                               const vec<ipa_param_descriptor, va_gc>
                                 &descriptors)
{
  struct ipa_agg_replacement_value *v;
  clone_info *cinfo = clone_info::get (node);
  if (!cinfo || !cinfo->param_adjustments)
    return std::pair<bool, bool> (true, false);

  bool anything_left = false;
  bool done_replacement = false;
  for (v = aggval; v; v = v->next)
    {
      gcc_checking_assert (v->index >= 0);

      unsigned unit_offset = v->offset / BITS_PER_UNIT;
      tree cst_type = TREE_TYPE (v->value);
      int split_idx;
      int new_idx
        = cinfo->param_adjustments->get_updated_index_or_split (v->index,
                                                                unit_offset,
                                                                cst_type,
                                                                &split_idx);
      v->index = new_idx;
      if (v->index >= 0)
        anything_left = true;
      else if (split_idx >= 0)
        {
          tree parm = ipa_get_param (descriptors, split_idx);
          tree ddef = ssa_default_def (cfun, parm);
          if (ddef)
            {
              replace_uses_by (ddef, v->value);
              done_replacement = true;
            }
        }
    }
  return std::pair<bool, bool> (anything_left, done_replacement);
}
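/* An illustrative example: if IPA-SRA replaced the aggregate parameter with
   pre-adjustment index 2 by a scalar that is now parameter 1,
   get_updated_index_or_split returns a negative new index together with
   split_idx == 1, and the loop above substitutes the known constant directly
   for the default definition of that new scalar parameter.  */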
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor, va_gc> *descs,
                         struct ipa_agg_replacement_value *av,
                         bool *sc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc) {}

  edge before_dom_children (basic_block) final override;
  bool cleanup_eh ()
    { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed;
  auto_bitmap m_need_eh_cleanup;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &offset, &size, &by_ref))
        continue;
      for (v = m_aggval; v; v = v->next)
        if (v->index == index
            && v->offset == offset)
          break;
      if (!v
          || v->by_ref != by_ref
          || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
                       size))
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v->value))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v->value)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v->value);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      stmt = gsi_stmt (gsi);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt))
        bitmap_set_bit (m_need_eh_cleanup, bb->index);
    }
  return NULL;
}
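/* For instance, with a recorded replacement saying that the field at some
   offset of an aggregate parameter always contains the constant 4, the
   walker above rewrites a matching load such as

     tmp_1 = param_2(D)->f;

   into

     tmp_1 = 4;

   provided the index, offset, size and by-reference-ness of the access all
   agree with the recorded value.  */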
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
        return false;
    }

  clone_info *cinfo = clone_info::get (cnode);
  if (cinfo && cinfo->param_adjustments)
    {
      i = cinfo->param_adjustments->get_original_index (i);
      if (i < 0)
        return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
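/* This predicate is intended for intraprocedural consumers, e.g. passes that
   want to seed the known bits of a parameter's default definition; it first
   maps PARM back to its pre-cloning index before consulting the recorded
   bits vector.  */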
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
        {
          if (i >= new_indices.length ())
            continue;
          int idx = new_indices[i];
          if (idx < 0)
            continue;
          parm = parm_decls[idx];
        }
      else
        parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
          || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
               || POINTER_TYPE_P (TREE_TYPE (parm)))
          || !is_gimple_reg (parm))
        continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
        continue;

      if (dump_file)
        {
          fprintf (dump_file, "Adjusting mask for param %u to ", i);
          print_hex (bits[i]->mask, dump_file);
          fprintf (dump_file, "\n");
        }

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
        {
          unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
          signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

          wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
                                  | wide_int::from (bits[i]->value, prec, sgn);
          set_nonzero_bits (ddef, nonzero_bits);
        }
      else
        {
          unsigned tem = bits[i]->mask.to_uhwi ();
          unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
          unsigned align = tem & -tem;
          unsigned misalign = bitpos & (align - 1);

          if (align > 1)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
                         align, misalign);

              unsigned old_align, old_misalign;
              struct ptr_info_def *pi = get_ptr_info (ddef);
              bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                       &old_misalign);

              if (old_known
                  && old_align > align)
                {
                  if (dump_file)
                    {
                      fprintf (dump_file, "But alignment was already %u.\n",
                               old_align);
                      if ((old_misalign & (align - 1)) != misalign)
                        fprintf (dump_file, "old_misalign (%u) and misalign "
                                 "(%u) mismatch\n", old_misalign, misalign);
                    }
                  continue;
                }

              if (old_known
                  && ((misalign & (old_align - 1)) != old_misalign)
                  && dump_file)
                fprintf (dump_file, "old_misalign (%u) and misalign (%u) "
                         "mismatch\n", old_misalign, misalign);

              set_ptr_info_alignment (pi, align, misalign);
            }
        }
    }
}
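/* A worked example of the pointer case above: with a recorded mask whose low
   three bits are clear (say 0xfffffff8, i.e. only the low three bits of the
   pointer are known), align becomes 8, the lowest set bit of the mask.  A
   recorded value of 4 then gives misalign == 4, meaning the pointer is known
   to be congruent to 4 modulo 8.  */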
/* Return true if the recorded range, interpreted in EXPR_TYPE, implies that
   the value cannot be zero.  */

bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
          && TYPE_UNSIGNED (expr_type)
          && wi::eq_p (min, wi::one (prec))
          && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  clone_info *cinfo = clone_info::get (node);
  if (cinfo && cinfo->param_adjustments)
    {
      cinfo->param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
        {
          if (i >= new_indices.length ())
            continue;
          remapped_idx = new_indices[i];
          if (remapped_idx < 0)
            continue;
        }
      else
        remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known
          && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
        {
          tree type = TREE_TYPE (ddef);
          unsigned prec = TYPE_PRECISION (type);
          if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u "
                           "(now %i) ", i, remapped_idx);
                  fprintf (dump_file, "%s[",
                           (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
                  print_decs (vr[i].min, dump_file);
                  fprintf (dump_file, ", ");
                  print_decs (vr[i].max, dump_file);
                  fprintf (dump_file, "]\n");
                }
              value_range v (type,
                             wide_int_storage::from (vr[i].min, prec,
                                                     TYPE_SIGN (type)),
                             wide_int_storage::from (vr[i].max, prec,
                                                     TYPE_SIGN (type)),
                             vr[i].type);
              set_range_info (ddef, v);
            }
          else if (POINTER_TYPE_P (TREE_TYPE (ddef))
                   && vr[i].nonzero_p (TREE_TYPE (ddef)))
            {
              if (dump_file)
                fprintf (dump_file, "Setting nonnull for %u\n", i);
              set_ptr_nonnull (ddef);
            }
        }
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  std::pair<bool, bool> rr
    = adjust_agg_replacement_values (node, aggval, *descriptors);
  bool cfg_changed = rr.second;
  if (!rr.first)
    {
      vec_free (descriptors);
      if (dump_file)
        fprintf (dump_file, "  All affected aggregate parameters were either "
                 "removed or converted into scalars, phase done.\n");
      if (cfg_changed)
        delete_unreachable_blocks_update_callgraph (node, false);
      return 0;
    }
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  bool modified_mem_access = false;
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker walker (&fbi, descriptors, aggval,
                                &modified_mem_access);
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  free_dominance_info (CDI_DOMINATORS);
  cfg_changed |= walker.cleanup_eh ();

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;

  vec_free (descriptors);
  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
}
/* Return true if OTHER describes same agg value.  */

bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
         && operand_equal_p (value, other.value, 0);
}

/* Destructor also removing individual aggregate values.  */

ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
{
  ipa_release_agg_values (m_known_aggs, false);
}


#include "gt-ipa-prop.h"
6108 #include "gt-ipa-prop.h"