/* Interprocedural analyses.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }
  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }
  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
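
/* For illustration: with the traits above, NULL is the empty marker and
   (ipa_bits *) 1 the deleted marker, so a minimal lookup sketch (assuming the
   table has already been allocated) is:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);

   Equal VALUE/MASK pairs then map to the same slot; see
   ipa_get_ipa_bits_for_value further below for the actual use.  */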
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
  {
    gcc_checking_assert (!p->equiv);
    inchash::hash hstate (p->type);
    hstate.add_ptr (p->min);
    hstate.add_ptr (p->max);
    return hstate.end ();
  }
  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->type == b->type && a->min == b->min && a->max == b->max;
  }
  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }
  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }
  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }
  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  %s[",
		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function recording that nothing about the actual
   argument is known.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
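
/* To illustrate the setters above on hypothetical (not from this file) C++
   source: in a method

     void B::foo () { bar (&this->field); }

   the argument is an address within the object THIS points to, so it is
   described with ipa_set_ancestor_jf using the bit offset of FIELD and the
   formal index of THIS, whereas plain "bar (x)" with X an unmodified
   parameter gets ipa_set_jf_simple_pass_through.  */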
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
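
/* A concrete, hypothetical example of the constructor shape assumed by the
   comment above:

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B ()
     {
       // 1) constructors of ancestor sub-objects run first,
       // 2) then the VMT pointers are set to B's vtables,
       // 3) only then user code runs and may call virtual functions.
     }

   When walking statements backwards from a call in section 3, every
   statement for which stmt_may_be_vtbl_ptr_store returns true is treated as
   a potential dynamic type change.  */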
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
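
/* For instance, in hypothetical source like

     void use (A *a) { a->virt (); }    // instance passed as normal argument
     A::A () { this->virt (); }         // THIS pointer of a constructor

   the function returns false for the argument of use (ordinary arguments
   cannot legally change their dynamic type, provided no inlined ctor/dtor
   block covers the call) and true for the THIS pointer inside A::A.  */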
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
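
/* A minimal sketch of the gimple this matches, using the hypothetical
   identifiers from the larger example further below:

     foo (int a)
     {
       int a.0;
       ...
       a.0_2 = a;   // returns the index of "a" if it is provably unmodified
     }  */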
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
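
/* The layout being matched is the conventional representation of a C++
   pointer to member function (field names are illustrative, following the
   Itanium C++ ABI naming that also shows up in the gimple dump in
   ipa_analyze_indirect_call_uses below):

     struct ptrmemfunc
     {
       void (*__pfn) ();   // method pointer, or vtable offset + 1
       ptrdiff_t __delta;  // adjustment added to the THIS pointer
     };  */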
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
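
/* For example, with existing entries for bit ranges [0, 32) and [64, 96),
   a new entry for [32, 64) is linked in between them, a second store to
   [0, 32) sets *ALREADY_THERE (the entry walked into later is the earlier
   store in program order and is overwritten), and [16, 48) would be a
   partial overlap making the function return NULL.  */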
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
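
/* As an illustration, for a hypothetical caller like

     struct S { int a; int b; };
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   the backward walk from the call finds both constant stores, and the
   resulting jump function describes an aggregate passed by reference whose
   parts at the offsets of A and B are known to be 1 and 2.  */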
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}

/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
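
/* Consequently, two jump functions recording the same known bits share one
   GC-allocated object, so a sketch like

     ipa_set_jfunc_bits (jf1, value, mask);
     ipa_set_jfunc_bits (jf2, value, mask);

   leaves jf1->bits == jf2->bits, and pointer identity can stand in for
   structural equality.  */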
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = ggc_alloc<value_range> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_type type, tree min, tree max)
{
  value_range tmp;
  tmp.type = type;
  tmp.min = min;
  tmp.max = max;
  tmp.equiv = NULL;
  return ipa_get_value_range (&tmp);
}

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range tmpvr, resvr;

	      tmpvr.type = type;
	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      tmpvr.equiv = NULL;
	      memset (&resvr, 0, sizeof (resvr));
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
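
/* Note how the pointer case above encodes alignment as known bits: for an
   argument with alignment ALIGN and misalignment BITPOS, the recorded value
   is BITPOS / BITS_PER_UNIT and the mask clears the low bits below
   ALIGN / BITS_PER_UNIT, i.e. for a 16-byte aligned pointer the four least
   significant bits are known.  */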
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
2110 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2111 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2112 intermediate information about each formal parameter. Currently it checks
2113 whether the call calls a pointer that is a formal parameter and if so, the
2114 parameter is marked with the called flag and an indirect call graph edge
2115 describing the call is created. This is very simple for ordinary pointers
2116 represented in SSA but not-so-nice when it comes to member pointers. The
2117 ugly part of this function does nothing more than trying to match the
2118 pattern of such a call. An example of such a pattern is the gimple dump
2119 below, the call is on the last line:
2122 f$__delta_5 = f.__delta;
2123 f$__pfn_24 = f.__pfn;
2127 f$__delta_5 = MEM[(struct *)&f];
2128 f$__pfn_24 = MEM[(struct *)&f + 4B];
2130 and a few lines below:
2133 D.2496_3 = (int) f$__pfn_24;
2134 D.2497_4 = D.2496_3 & 1;
2141 D.2500_7 = (unsigned int) f$__delta_5;
2142 D.2501_8 = &S + D.2500_7;
2143 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2144 D.2503_10 = *D.2502_9;
2145 D.2504_12 = f$__pfn_24 + -1;
2146 D.2505_13 = (unsigned int) D.2504_12;
2147 D.2506_14 = D.2503_10 + D.2505_13;
2148 D.2507_15 = *D.2506_14;
2149 iftmp.11_16 = (String:: *) D.2507_15;
2152 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2153 D.2500_19 = (unsigned int) f$__delta_5;
2154 D.2508_20 = &S + D.2500_19;
2155 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2157 Such patterns are results of simple calls to a member pointer:
2159 int doprinting (int (MyString::* f)(int) const)
2161 MyString S ("somestring");
2166 Moreover, the function also looks for called pointers loaded from aggregates
2167 passed by value or reference. */
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
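/* A hypothetical example (not taken from the original sources) of the
   aggregate case handled above: a called pointer loaded from an aggregate
   passed by value,

     struct ops { int (*cb) (int); };
     int run (struct ops o) { return o.cb (4); }

   would yield an indirect edge with agg_contents set, by_ref clear and
   offset equal to the bit position of the cb field.  */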
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
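/* An illustrative (assumed) source pattern for the analysis above:

     struct A { virtual int foo (); };
     int f (A *a) { return a->foo (); }

   Here the gimple call target is an OBJ_TYPE_REF whose object is the default
   definition of the parameter a, so a polymorphic call note with a zero
   ancestor offset would be created for the call.  */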
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
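/* For illustration (an assumed example): in

     int h (int (*fn) (int)) { return fn (1) + fn (2); }

   both uses of the default definition of fn appear in call statements, so
   its controlled-uses count is 2; any other non-debug use, such as storing
   fn to memory, would force the count to IPA_UNDESCRIBED_USE.  */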
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
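/* A sketch of the composition performed above, under an assumed inline
   chain: if CS passes caller formal number 3 unchanged (a simple
   pass-through) and E's argument is in turn a simple pass-through of the
   corresponding callee formal, the updated jump function for E becomes a
   simple pass-through referring directly to formal 3 of the new root, with
   agg_preserved being the conjunction of the two original flags.  */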
/* If TARGET is an addr_expr of a function declaration, make it the
   (speculative) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct ipa_call_summary *es = ipa_call_summaries->get_create (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We had better ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s.  Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s); this agrees with previous "
		     "speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = ipa_call_summaries->get_create (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ???  Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
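/* Note that REQ_OFFSET is measured in bits: array element offsets are
   shifted by LOG2_BITS_PER_UNIT and record fields are located with
   int_bit_position.  For example, in a static initializer of a struct with
   two 32-bit members, the second member would be found at offset 32.  */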
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
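/* A hypothetical lookup (assumed example) that the function above can
   satisfy:

     static int (* const tab[2]) (int) = { f, g };

   With BY_REF true, an ADDR_EXPR of tab and an offset selecting the first
   element can yield the address of f, because tab is a read-only global
   variable with a CONSTRUCTOR initializer.  */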
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
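/* For instance, combining counts of 2 and 3 yields 5, whereas combining
   anything with IPA_UNDESCRIBED_USE remains IPA_UNDESCRIBED_USE.  */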
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Ensure that the array of edge argument infos is big enough to accommodate a
   structure for all edges and reallocate it if not.  Also, allocate the
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ipa_edge_args_sum->release ();
  ipa_edge_args_sum = NULL;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ipa_node_params_sum->release ();
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_transformation_initialize ();
      src_trans = ipcp_transformation_sum->get_create (src);
      ipcp_transformation *dst_trans
	= ipcp_transformation_sum->get_create (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
	= ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
	{
	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
	  for (unsigned i = 0; i < src_vr->length (); ++i)
	    dst_vr->quick_push ((*src_vr)[i]);
	}
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print the ipa_tree_map data structure of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->type);
      stream_write_tree (ob, jump_func->m_vr->min, true);
      stream_write_tree (ob, jump_func->m_vr->max, true);
    }
}
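/* Informally, the record written above is: the jump function type, the
   type-specific payload, the aggregate item count (followed by the by_ref
   bit and offset/value pairs when the count is nonzero), and finally the
   optional known-bits and value-range data, each guarded by a one-bit
   presence flag.  ipa_read_jump_function below consumes the same
   sequence.  */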
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_type type = streamer_read_enum (ib, value_range_type,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
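/* The per-edge count written above multiplexes two values into one word:
   the argument count in the upper bits and a "contexts streamed" flag in
   the least significant bit.  A sketch of the arithmetic (n and has_ctx
   are hypothetical local names, not identifiers from this file):

     encoded = n * 2 + has_ctx;   // writer
     has_ctx = encoded & 1;       // reader
     n       = encoded / 2;

   For example, three arguments with polymorphic call contexts stream as
   7; three without, as 6.  */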
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    {
      ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
      (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
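/* The writer above is deliberately two-pass: the first walk over the
   partition only counts the eligible nodes, that count is written as a
   header word, and a second identical walk emits the per-node records.
   The matching reader (ipa_prop_read_section below) can then run a plain
   counted loop, roughly

     count = streamer_read_uhwi (&ib_main);
     for (i = 0; i < count; i++)
       ... read one node record ...

   with no terminator sentinel needed between records.  */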
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
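/* For reference, the section decoded above is a lto_function_header
   followed by three consecutive regions, so the offsets are simple
   prefix sums:

     cfg_offset    == sizeof (struct lto_function_header)
     main_offset   == cfg_offset + header->cfg_size
     string_offset == main_offset + header->main_size

   The main region carries the streamed records; the string region holds
   the string table that stream_read_tree and friends index into.  */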
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* Stream out the aggregate value replacement chain for NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_type,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
					      streamer_read_widest_int (ib));
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
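/* Because each aggregate replacement value read above is pushed onto the
   head of the chain (av->next = aggvals; aggvals = av;), the in-memory
   list ends up in the reverse of stream order.  Nothing downstream
   appears to rely on the order: the lookup in
   ipcp_modif_dom_walker::before_dom_children matches entries by their
   (index, offset) pair rather than by position.  */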
/* Write all aggregate replacement for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
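/* A worked example of the remapping above, assuming four original
   parameters and combined_args_to_skip = {1}:

     i:      0   1   2   3
     adj[i]: 0  -1   1   2

   A replacement recorded for parameter 3 of the original function is
   thus re-indexed to parameter 2 of the clone.  Entries for skipped
   parameters would map to -1, which the callers are expected never to
   have produced.  */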
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
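/* The alignment computation above uses the lowest-set-bit trick: for the
   mask TEM of possibly-nonzero bits, TEM & -TEM isolates its least
   significant set bit, which is the largest power-of-two alignment
   consistent with the tracked bits.  For example, a mask of 0x8 gives
   align == 8 (the low three bits are known), and a value of 0x4 then
   yields misalign == (0x4 & 7) == 4.  */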
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
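/* Two details of the function above worth calling out.  The pointer case
   recognizes "known non-NULL" encoded as the anti-range ~[0, 0], i.e.
   any value except zero.  The integral case re-extends the stored
   min/max with wide_int_storage::from to the precision and sign of the
   parameter's default-def SSA name before calling set_range_info,
   presumably because the precision streamed in need not match the type
   of the parameter in the materialized clone.  */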
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"