/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }

  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }

  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
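
/* An illustrative example (not part of the original sources): in an ipa_bits
   pair, a mask bit that is clear means the corresponding bit of the argument
   is known to be equal to the corresponding bit of value, while a set mask
   bit means that bit is unknown.  A pointer known to be 8-byte aligned is
   thus described by value == 0 and mask == ~7: the low three bits are known
   to be zero and everything else is unknown.  */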

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    hashval_t t = (hashval_t) p->type;
    t = iterative_hash_expr (p->min, t);
    return iterative_hash_expr (p->max, t);
  }

  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }

  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }

  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to DESCRIPTORS.  Return -1 if it is not there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Return the declaration of Ith formal parameter of the function corresponding
   to INFO.  Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
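
/* For illustration (a hypothetical, abbreviated dump assembled from the
   format strings above, not taken from a real compilation):

     Jump functions:
       Jump functions of caller foo/12:
	 callsite  foo/12 -> bar/7 :
	    param 0: PASS THROUGH: 0, op nop_expr
	    param 1: CONST: 4

   The exact names, indices and annotations depend on the unit compiled.  */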

/* Set JFUNC to be a jump function representing no knowledge whatsoever.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set JFUNC to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
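
/* For instance (an illustrative example): in

     void caller (int a)
     {
       callee (a);
     }

   the argument of the call to callee is described by a simple pass-through
   jump function with formal_id 0 and operation NOP_EXPR.  */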

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
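
/* For instance (an illustrative example): in

     void caller (int a)
     {
       callee (a + 4);
     }

   the argument is described by an arithmetic pass-through jump function with
   formal_id 0, operation PLUS_EXPR and operand 4.  */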

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
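
/* For instance (an illustrative example): given

     struct A { int x; };
     struct B { int pad; struct A a; };

     void caller (struct B *b)
     {
       callee (&b->a);
     }

   the argument &b->a is described by an ancestor jump function with
   formal_id 0 and offset equal to the position of field a within B
   (32 bits here, assuming a 4-byte int and no extra padding).  */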

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can
   try to derive the new type.  That is enough and we can stop, we will never
   see the calls into constructors of sub-objects in this code.  Therefore we
   can safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
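
/* To make the assumptions above concrete, a C++ constructor such as (an
   illustrative example)

     struct D : B
     {
       int m;
       D () : B (), m (7) {}
     };

   is emitted roughly as: first a call to B::B (section 1), then stores of
   the D vtable pointer into the object and its B sub-object (section 2), and
   only then the store m = 7 together with any user code (section 3).  */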

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also, as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we do not have
     an easy way to tie them together, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
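
/* For reference (an illustrative sketch, assuming the common Itanium-style
   lowering): a member pointer such as int (S::*)(int) is represented by a
   record shaped roughly like

     struct
     {
       void (*__pfn) ();	// pointer to METHOD_TYPE, or encoded
				// vtable index for virtual methods
       long __delta;		// adjustment to apply to the this pointer
     };

   and it is this two-field shape that the predicate above checks for.  */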

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot deal
	   with.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
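
/* For instance (an illustrative example): in a caller such as

     struct S { int a; int b; };

     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   the backward walk from the call finds the two stores and records constants
   1 and 2 at offsets 0 and 32 (with 4-byte ints) as the known contents of
   the aggregate passed to callee.  */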

/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
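
/* Thanks to the hash-consing above, repeated queries with equal values share
   one GC-allocated object, e.g. (an illustrative sketch):

     ipa_bits *a = ipa_get_ipa_bits_for_value (0, ~(widest_int) 7);
     ipa_bits *b = ipa_get_ipa_bits_for_value (0, ~(widest_int) 7);
     gcc_checking_assert (a == b);  */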

/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
1778 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
1780 static value_range
*
1781 ipa_get_value_range (value_range
*tmp
)
1783 value_range
**slot
= ipa_vr_hash_table
->find_slot (tmp
, INSERT
);
1787 value_range
*vr
= ggc_alloc
<value_range
> ();

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
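
/* For instance (an illustrative example): in

     void caller (int n)
     {
       if (n > 0)
	 callee (n);
     }

   VRP gives the argument the range [1, INT_MAX], so the jump function is
   given a VR_RANGE value_range with those bounds, shared via the hash table
   above.  */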

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range tmpvr, resvr;

	      tmpvr.type = type;
	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      tmpvr.equiv = NULL;
	      memset (&resvr, 0, sizeof (resvr));
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
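
/* Putting the pieces together (an illustrative example): for a call

     callee (a, 7, &buf);

   where a is the caller's first parameter, the loop above would typically
   produce a pass-through jump function for the first argument, a constant
   jump function (with fully known bits) for the second, and, if buf is a
   local aggregate initialized with constants, aggregate contents recorded by
   determine_locally_known_aggregate_parts for the third.  */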

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
                                tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);

      if (index >= 0)
        ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
                                 gimple_assign_rhs1 (def), &index, &offset,
                                 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
                               gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
                                  call, &jfunc))
        return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
                              call, &jfunc, anc_offset))
        return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
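/* For illustration (an editor-added sketch, not from the original sources):
   the OBJ_TYPE_REF handled above arises from C++ code such as

     int call (A *a) { return a->foo (); }

   with FOO virtual.  The note created here records the parameter index of A,
   the offset of the ancestor object and the OTR token (the vtable slot),
   which is what later devirtualization works from.  */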
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    if (!is_gimple_debug (USE_STMT (use_p)))
                      {
                        controlled_uses = IPA_UNDESCRIBED_USE;
                        break;
                      }
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
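/* For illustration (an editor-added sketch, not from the original sources):
   in

     static void helper (void (*fn) (void)) { fn (); }

   every use of FN feeds a call statement, so its uses can be fully described
   and counted; storing FN to memory or comparing it against another pointer
   would instead force IPA_UNDESCRIBED_USE.  */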
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
        {
          ipa_set_param_used (info, i, true);
          ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
        }
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
        = ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
          struct ipa_polymorphic_call_context *src_ctx
            = ipa_get_ith_polymorhic_call_context (top, dst_fid);

          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
              ipa_set_jf_unknown (dst);
              continue;
            }

          src = ipa_get_ith_jump_func (top, dst_fid);

          if (src_ctx && !src_ctx->useless_p ())
            {
              struct ipa_polymorphic_call_context ctx = *src_ctx;

              /* TODO: Make type preserved safe WRT contexts.  */
              if (!ipa_get_jf_ancestor_type_preserved (dst))
                ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
              ctx.offset_by (dst->value.ancestor.offset);
              if (!ctx.useless_p ())
                {
                  if (!dst_ctx)
                    {
                      vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                             count);
                      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                    }

                  dst_ctx->combine_with (ctx);
                }
            }

          if (src->agg.items
              && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
            {
              struct ipa_agg_jf_item *item;
              int j;

              /* Currently we do not produce clobber aggregate jump functions,
                 replace with merging when we do.  */
              gcc_assert (!dst->agg.items);

              dst->agg.items = vec_safe_copy (src->agg.items);
              dst->agg.by_ref = src->agg.by_ref;
              FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
                item->offset -= dst->value.ancestor.offset;
            }

          if (src->type == IPA_JF_PASS_THROUGH
              && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id
                = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
            }
          else if (src->type == IPA_JF_PASS_THROUGH
                   && TREE_CODE_CLASS (src->value.pass_through.operation)
                      == tcc_unary)
            {
              dst->value.ancestor.formal_id
                = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved = false;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
              dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
            }
          else
            ipa_set_jf_unknown (dst);
        }
      else if (dst->type == IPA_JF_PASS_THROUGH)
        {
          struct ipa_jump_func *src;
          /* We must check range due to calls with variable number of arguments
             and we cannot combine jump functions with operations.  */
          if (dst->value.pass_through.operation == NOP_EXPR
              && (dst->value.pass_through.formal_id
                  < ipa_get_cs_argument_count (top)))
            {
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
              struct ipa_polymorphic_call_context *src_ctx
                = ipa_get_ith_polymorhic_call_context (top, dst_fid);

              if (src_ctx && !src_ctx->useless_p ())
                {
                  struct ipa_polymorphic_call_context ctx = *src_ctx;

                  /* TODO: Make type preserved safe WRT contexts.  */
                  if (!ipa_get_jf_pass_through_type_preserved (dst))
                    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
                  if (!ctx.useless_p ())
                    {
                      if (!dst_ctx)
                        {
                          vec_safe_grow_cleared
                            (args->polymorphic_call_contexts, count);
                          dst_ctx
                            = ipa_get_ith_polymorhic_call_context (args, i);
                        }
                      dst_ctx->combine_with (ctx);
                    }
                }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
                  ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  ipa_set_jf_cst_copy (dst, src);
                  break;

                case IPA_JF_PASS_THROUGH:
                  {
                    int formal_id = ipa_get_jf_pass_through_formal_id (src);
                    enum tree_code operation;
                    operation = ipa_get_jf_pass_through_operation (src);

                    if (operation == NOP_EXPR)
                      {
                        bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
                        ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else if (TREE_CODE_CLASS (operation) == tcc_unary)
                      ipa_set_jf_unary_pass_through (dst, formal_id,
                                                     operation);
                    else
                      {
                        tree operand = ipa_get_jf_pass_through_operand (src);
                        ipa_set_jf_arith_pass_through (dst, formal_id, operand,
                                                       operation);
                      }
                    break;
                  }
                case IPA_JF_ANCESTOR:
                  {
                    bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
                                         ipa_get_jf_ancestor_formal_id (src),
                                         agg_p);
                    break;
                  }
                default:
                  gcc_unreachable ();
                }

              if (src->agg.items
                  && (dst_agg_p || !src->agg.by_ref))
                {
                  /* Currently we do not produce clobber aggregate jump
                     functions, replace with merging when we do.  */
                  gcc_assert (!dst->agg.items);

                  dst->agg.by_ref = src->agg.by_ref;
                  dst->agg.items = vec_safe_copy (src->agg.items);
                }
            }
          else
            ipa_set_jf_unknown (dst);
        }
    }
}
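/* For illustration (an editor-added sketch, not from the original sources):
   if E describes an argument as the ancestor jump function "parameter 0 + 8"
   and CS passes to that parameter a simple pass-through of its own parameter
   2, the combination computed above is "parameter 2 + 8"; two ancestor
   functions compose by adding their offsets.  */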
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
                                bool speculative)
{
  struct cgraph_node *callee;
  struct ipa_call_summary *es = ipa_call_summaries->get (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
          /* Member pointer call that goes through a VMT lookup.  */
          if (ie->indirect_info->member_ptr
              /* Or if target is not an invariant expression and we do not
                 know if it will evaluate to a function at runtime.
                 This can happen when folding through &VAR, where &VAR
                 is IP invariant, but VAR itself is not.

                 TODO: Revisit this when GCC 5 is branched.  It seems that
                 member_ptr check is not needed and that we may try to fold
                 the expression and see if VAR is readonly.  */
              || !is_gimple_ip_invariant (target))
            {
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (ie->call_stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "discovered direct call non-invariant %s\n",
                                   ie->caller->dump_name ());
                }
              return NULL;
            }

          if (dump_enabled_p ())
            {
              location_t loc = gimple_location_safe (ie->call_stmt);
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                               "discovered direct call to non-function in %s, "
                               "making it __builtin_unreachable\n",
                               ie->caller->dump_name ());
            }

          target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
          callee = cgraph_node::get_create (target);
          unreachable = true;
        }
      else
        callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
         In the case of static functions we are out of luck, since we already
         removed its body.  In the case of public functions we may or may
         not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
          || !TREE_PUBLIC (target))
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s -> %s) but can not refer to it. Giving up.\n",
                     ie->caller->dump_name (),
                     ie->callee->dump_name ());
          return NULL;
        }
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
          != callee->ultimate_alias_target ())
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s -> %s) but the call is already "
                     "speculated to %s. Giving up.\n",
                     ie->caller->dump_name (), callee->dump_name (),
                     e2->callee->dump_name ());
        }
      else
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s -> %s) this agrees with previous "
                     "speculation.\n",
                     ie->caller->dump_name (), callee->dump_name ());
        }
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s -> %s), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
               speculative ? "speculative" : "known",
               ie->caller->dump_name (),
               callee->dump_name ());
      if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                       "converting indirect call in %s to direct call to %s\n",
                       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
         for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
        {
          es = ipa_call_summaries->get (ie);
          es->call_stmt_size -= (eni_size_weights.indirect_call_cost
                                 - eni_size_weights.call_cost);
          es->call_stmt_time -= (eni_time_weights.indirect_call_cost
                                 - eni_time_weights.call_cost);
        }
    }
  else
    {
      if (!callee->can_be_discarded_p ())
        {
          cgraph_node *alias;
          alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
          if (alias)
            callee = alias;
        }
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
             (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor,
                                     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          offset_int off;
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

          if (index)
            {
              if (TREE_CODE (index) == RANGE_EXPR)
                off = wi::to_offset (TREE_OPERAND (index, 0));
              else
                off = wi::to_offset (index);
              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
                {
                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
                  off = wi::sext (off - wi::to_offset (low_bound),
                                  TYPE_PRECISION (TREE_TYPE (index)));
                }
              off *= wi::to_offset (unit_size);
              /* ??? Handle more than just the first index of a
                 RANGE_EXPR.  */
            }
          else
            off = wi::to_offset (unit_size) * ix;

          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
            continue;
          elt_offset = off.to_shwi ();
        }
      else if (TREE_CODE (type) == RECORD_TYPE)
        {
          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
          if (DECL_BIT_FIELD (index))
            return NULL;
          elt_offset = int_bit_position (index);
        }
      else
        gcc_unreachable ();

      if (elt_offset > req_offset)
        return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
        return find_constructor_constant_at_offset (val,
                                                    req_offset - elt_offset);

      if (elt_offset == req_offset
          && is_gimple_reg_type (TREE_TYPE (val))
          && is_gimple_ip_invariant (val))
        return val;
    }
  return NULL;
}
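/* For illustration (an editor-added worked example, not from the original
   sources): given

     static const int table[4] = { 10, 20, 30, 40 };

   a lookup with REQ_OFFSET 64 on a target with 32-bit int walks the
   ARRAY_TYPE branch above, computes element bit offsets 0, 32, 64, ... and
   returns the constant 30.  */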
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
        return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
                            HOST_WIDE_INT offset, bool by_ref,
                            bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
        {
          if (from_global_constant)
            *from_global_constant = true;
          return res;
        }
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
        /* Currently we do not have clobber values, return NULL for them once
           we do.  */
        gcc_checking_assert (is_gimple_ip_invariant (item->value));
        if (from_global_constant)
          *from_global_constant = false;
        return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol,
                            struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
                                           origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
             origin->caller->dump_name (),
             xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
        return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
                                  struct ipa_jump_func *jfunc,
                                  tree target_type,
                                  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
                                           ie->indirect_info->offset,
                                           ie->indirect_info->by_ref,
                                           &from_global_constant);
      if (target
          && !from_global_constant
          && !ie->indirect_info->guaranteed_unmodified)
        return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
                           && (cs != ie
                               || jfunc->type != IPA_JF_CONST
                               || !cgraph_node_for_jfunc (jfunc)
                               || cs->callee
                                  == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
        fprintf (dump_file,
                 "Type inconsistent devirtualization: %s->%s\n",
                 ie->caller->dump_name (),
                 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
        fprintf (dump_file,
                 "No devirtualization target in %s\n",
                 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
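/* For illustration (an editor-added sketch, not from the original sources):
   when analysis proves a virtual call such as a->foo () can have no valid
   target, the call is redirected to __builtin_unreachable, which lets the
   optimizers treat the path as dead.  */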
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
                                   struct ipa_jump_func *jfunc,
                                   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST)
        ? ipa_get_jf_constant (jfunc) : NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
                                           ie->indirect_info->offset,
                                           true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
        {
          bool can_refer;
          t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
                                                 vtable, offset, &can_refer);
          if (can_refer)
            {
              if (!t
                  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
                      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
                  || !possible_polymorphic_call_target_p
                       (ie, cgraph_node::get (t)))
                {
                  /* Do not speculate builtin_unreachable, it is stupid!  */
                  if (!ie->indirect_info->vptr_changed)
                    target = ipa_impossible_devirt_target (ie, target);
                  else
                    target = NULL;
                }
              else
                {
                  target = t;
                  speculative = ie->indirect_info->vptr_changed;
                }
            }
        }
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
                                      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
        target = targets[0]->decl;
      else
        target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
           && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
           && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
                                            ie->indirect_info->otr_token,
                                            ie->indirect_info->context);
      if (n)
        {
          target = n->decl;
          speculative = true;
        }
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
          (ie, cgraph_node::get_create (target)))
        {
          if (speculative)
            return NULL;
          target = ipa_impossible_devirt_target (ie, target);
        }
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_node *node,
                                      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
                                ? cs->caller->global.inlined_to
                                : cs->caller);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
        continue;

      /* We must check range due to calls with variable number of
         arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
        {
          ici->param_index = -1;
          continue;
        }

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
        {
          struct cgraph_edge *de;
          struct ipa_ref *ref;
          ie->speculative_call_info (de, ie, ref);
          spec_target = de->callee;
        }

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
        new_direct_edge = NULL;
      else if (ici->polymorphic)
        {
          ipa_polymorphic_call_context ctx;
          ctx = ipa_context_from_jfunc (new_root_info, cs, param_index,
                                        jfunc);
          new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
                                                               ctx);
        }
      else
        {
          tree target_type = ipa_get_type (inlined_node_info, param_index);
          new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
                                                              target_type,
                                                              new_root_info);
        }

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
          && new_direct_edge->callee == spec_target)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          top = IPA_EDGE_REF (cs);
          res = true;
          if (!new_direct_edge->speculative)
            continue;
        }
      else if (new_direct_edge)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          if (new_direct_edge->call_stmt)
            new_direct_edge->call_stmt_cannot_inline_p
              = !gimple_check_call_matching_types (
                  new_direct_edge->call_stmt,
                  new_direct_edge->callee->decl, false);
          if (new_edges)
            {
              new_edges->safe_push (new_direct_edge);
              res = true;
            }
          top = IPA_EDGE_REF (cs);
          /* If speculative edge was introduced we still need to update
             call info of the indirect edge.  */
          if (!new_direct_edge->speculative)
            continue;
        }
      if (jfunc->type == IPA_JF_PASS_THROUGH
          && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_pass_through_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index
                = ipa_get_jf_pass_through_formal_id (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_pass_through_type_preserved (jfunc))
                ici->vptr_changed = true;
            }
        }
      else if (jfunc->type == IPA_JF_ANCESTOR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_ancestor_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
              ici->offset += ipa_get_jf_ancestor_offset (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_ancestor_type_preserved (jfunc))
                ici->vptr_changed = true;
            }
        }
      else
        /* Either we can find a destination for this edge now or never.  */
        ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
                                   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
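/* For illustration (an editor-added worked example, not from the original
   sources): if a parameter had C = 2 controlled uses in the caller and the
   inlined callee contributes D = 3 controlled uses of the value passed to it,
   the combined count is 2 + 3 - 1 = 4, because the inlined call site itself
   was one of the caller's controlled uses and no longer exists.  */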
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
               ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
        {
          int src_idx, c, d;
          src_idx = ipa_get_jf_pass_through_formal_id (jf);
          c = ipa_get_controlled_uses (new_root_info, src_idx);
          d = ipa_get_controlled_uses (old_root_info, i);

          gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
                               == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
          c = combine_controlled_uses_counters (c, d);
          ipa_set_controlled_uses (new_root_info, src_idx, c);
          if (c == 0 && new_root_info->ipcp_orig_node)
            {
              struct cgraph_node *n;
              struct ipa_ref *ref;
              tree t = new_root_info->known_csts[src_idx];

              if (t && TREE_CODE (t) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
                  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
                  && (ref = new_root->find_reference (n, NULL, 0)))
                {
                  if (dump_file)
                    fprintf (dump_file, "ipa-prop: Removing cloning-created "
                             "reference from %s to %s.\n",
                             new_root->dump_name (),
                             n->dump_name ());
                  ref->remove_reference ();
                }
            }
        }
      else if (jf->type == IPA_JF_CONST
               && (rdesc = jfunc_rdesc_usable (jf)))
        {
          int d = ipa_get_controlled_uses (old_root_info, i);
          int c = rdesc->refcount;
          rdesc->refcount = combine_controlled_uses_counters (c, d);
          if (rdesc->refcount == 0)
            {
              tree cst = ipa_get_jf_constant (jf);
              struct cgraph_node *n;
              gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
                                   && TREE_CODE (TREE_OPERAND (cst, 0))
                                      == FUNCTION_DECL);
              n = cgraph_node::get (TREE_OPERAND (cst, 0));
              if (n)
                {
                  struct cgraph_node *clone;
                  bool ok;
                  ok = remove_described_reference (n, rdesc);
                  gcc_checking_assert (ok);

                  clone = cs->caller;
                  while (clone->global.inlined_to
                         && clone != rdesc->cs->caller
                         && IPA_NODE_REF (clone)->ipcp_orig_node)
                    {
                      struct ipa_ref *ref;
                      ref = clone->find_reference (n, NULL, 0);
                      if (ref)
                        {
                          if (dump_file)
                            fprintf (dump_file, "ipa-prop: Removing "
                                     "cloning-created reference "
                                     "from %s to %s.\n",
                                     clone->dump_name (),
                                     n->dump_name ());
                          ref->remove_reference ();
                        }
                      clone = clone->callers->caller;
                    }
                }
            }
        }
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
          if (rdesc)
            rdesc->refcount = IPA_UNDESCRIBED_USE;
        }
      else if (jf->type == IPA_JF_PASS_THROUGH)
        ipa_set_controlled_uses (new_root_info,
                                 jf->value.pass_through.formal_id,
                                 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocates it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
         ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table
      = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table
      = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  *args = ipa_edge_args ();
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ipa_edge_args_sum->release ();
  ipa_edge_args_sum = NULL;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ipa_node_params_sum->release ();
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  Also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations,
                           symtab->cgraph_max_uid + 1);
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table
      = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table
      = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
        {
          struct ipa_cst_ref_desc *rdesc;
          try_decrement_rdesc_refcount (jf);
          if (jf->type == IPA_JF_CONST
              && (rdesc = ipa_get_jf_constant_rdesc (jf))
              && rdesc->cs == cs)
            rdesc->cs = NULL;
        }
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
                                ipa_edge_args *old_args,
                                ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

          if (!src_rdesc)
            dst_jf->value.constant.rdesc = NULL;
          else if (src->caller == dst->caller)
            {
              struct ipa_ref *ref;
              symtab_node *n = cgraph_node_for_jfunc (src_jf);
              gcc_checking_assert (n);
              ref = src->caller->find_reference (n, src->call_stmt,
                                                 src->lto_stmt_uid);
              gcc_checking_assert (ref);
              dst->caller->clone_reference (ref, ref->stmt);

              struct ipa_cst_ref_desc *dst_rdesc
                = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = NULL;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else if (src_rdesc->cs == src)
            {
              struct ipa_cst_ref_desc *dst_rdesc
                = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
              src_rdesc->next_duplicate = dst_rdesc;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              /* This can happen during inlining, when a JFUNC can refer to a
                 reference taken in a function up in the tree of inline clones.
                 We need to find the duplicate that refers to our tree of
                 inline clones.  */

              gcc_assert (dst->caller->global.inlined_to);
              for (dst_rdesc = src_rdesc->next_duplicate;
                   dst_rdesc;
                   dst_rdesc = dst_rdesc->next_duplicate)
                {
                  struct cgraph_node *top;
                  top = dst_rdesc->cs->caller->global.inlined_to
                    ? dst_rdesc->cs->caller->global.inlined_to
                    : dst_rdesc->cs->caller;
                  if (dst->caller->global.inlined_to == top)
                    break;
                }
              gcc_assert (dst_rdesc);
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
               && src->caller == dst->caller)
        {
          struct cgraph_node *inline_root
            = dst->caller->global.inlined_to
              ? dst->caller->global.inlined_to : dst->caller;
          struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
          int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

          int c = ipa_get_controlled_uses (root_info, idx);
          if (c != IPA_UNDESCRIBED_USE)
            {
              c++;
              ipa_set_controlled_uses (root_info, idx, c);
            }
        }
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
                             ipa_node_params *old_info,
                             ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
        {
          struct ipa_agg_replacement_value *v;

          v = ggc_alloc<ipa_agg_replacement_value> ();
          memcpy (v, old_av, sizeof (*v));
          v->next = new_av;
          new_av = v;
          old_av = old_av->next;
        }
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans
    = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      ipcp_transformation_summary *dst_trans
        = ipcp_get_transformation_summary (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
        = ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
        {
          vec_safe_reserve_exact (dst_vr, src_vr->length ());
          for (unsigned i = 0; i < src_vr->length (); ++i)
            dst_vr->quick_push ((*src_vr)[i]);
        }
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f,
                                 struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
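/* For illustration (an editor-added sketch, not from the original sources):
   the dump produced above has the shape

     Aggregate replacements: 0[32]=42, 1[0]=&foo

   i.e. a comma-separated list of index[offset]=value entries with offsets
   given in bits.  */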
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value)
          == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
               == tcc_unary)
        streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
                           VR_LAST, jump_func->m_vr->type);
      stream_write_tree (ob, jump_func->m_vr->min, true);
      stream_write_tree (ob, jump_func->m_vr->max, true);
    }
}
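/* For illustration (an editor-added sketch, not from the original sources):
   the record streamed above is laid out as

     uhwi     jump function type
     ...      type-specific payload
     uhwi     aggregate item count
     bitpack  by_ref (only if count != 0), then { uhwi offset; tree value; }
     bitpack  has_bits, then [ widest_int value; widest_int mask ]
     bitpack  has_vr, then [ enum type; tree min; tree max ]

   ipa_read_jump_function below consumes the fields in exactly this
   order.  */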
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id,
                                          agg_preserved);
        }
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
        {
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
        break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_type type = streamer_read_enum (ib, value_range_type,
                                                       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
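/* The reader below unpacks the six flags in the same order in which they
   were packed above; the offset is only present in the stream when
   agg_contents or polymorphic is set, otherwise it is known to be zero.  */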
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
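/* For each call site the writer above emits a single unsigned HWI that
   encodes both the argument count and whether polymorphic call contexts
   follow: count * 2 + (contexts != NULL).  The reader below recovers the
   flag from the low bit and the count by dividing by two.  */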
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    {
      ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
      (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;

      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
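/* The jump-function section thus consists of a count of analyzed nodes,
   one ipa_write_node_info record per such node, and a terminating zero
   byte, which is exactly the layout ipa_prop_read_section expects.  */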
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
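/* Stream out the IPA-CP transformation summary for NODE to OB: the chain
   of aggregate replacement values, followed by the known value ranges and
   the known bits of its formal parameters.  */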
void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_type,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
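/* For example, if a clone skips the second of three original parameters,
   the mapping built above is adj = { 0, -1, 1 }: a replacement for original
   parameter 2 is re-indexed to 1, while -1 would mark a value attached to a
   removed parameter.  */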
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
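/* In effect, a load such as "x_1 = param_2(D)->field" whose matching
   aggregate replacement records a known constant at that offset is
   rewritten to "x_1 = CST"; if the old statement could throw, the rewrite
   may also purge dead EH edges, which is what m_cfg_changed reports back
   to the caller.  */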
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  /* The lowest set bit of the mask is the first bit whose value is
	     not known, so everything below it is known and determines the
	     alignment.  */
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
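/* For instance, in ipcp_update_bits above, a pointer parameter whose mask
   has the low three bits clear (known) and whose value has those bits equal
   to 4 yields align = 8 and misalign = 4, i.e. the pointer is known to be
   congruent to 4 modulo 8.  */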
/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      /* The anti-range ~[0, 0] excludes only NULL, i.e. the pointer
		 is known to be nonnull.  */
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
#include "gt-ipa-prop.h"