/* Interprocedural analyses.
   Copyright (C) 2005-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;
/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;

  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }

  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
  {
    return a->value == b->value && a->mask == b->mask;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (ipa_bits *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const ipa_bits *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const ipa_bits *p)
  {
    return p == reinterpret_cast<const ipa_bits *> (1);
  }

  static void
  mark_deleted (ipa_bits *&p)
  {
    p = reinterpret_cast<ipa_bits *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
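/* For illustration (a sketch, not code from this file): with the traits
   above, two jump functions that both learn, say, "value 0x4, mask 0x3"
   about an argument end up sharing one GC-allocated ipa_bits record,
   because ipa_get_ipa_bits_for_value further below looks the (value, mask)
   pair up in this table before allocating a fresh record.  */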
/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;

  static hashval_t
  hash (const value_range *p)
  {
    inchash::hash hstate (p->kind ());
    inchash::add_expr (p->min (), hstate);
    inchash::add_expr (p->max (), hstate);
    return hstate.end ();
  }

  static bool
  equal (const value_range *a, const value_range *b)
  {
    return a->equal_p (*b);
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (value_range *&p)
  {
    p = NULL;
  }

  static bool
  is_empty (const value_range *p)
  {
    return p == NULL;
  }

  static bool
  is_deleted (const value_range *p)
  {
    return p == reinterpret_cast<const value_range *> (1);
  }

  static void
  mark_deleted (value_range *&p)
  {
    p = reinterpret_cast<value_range *> (1);
  }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
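/* For example (illustrative): a function declared with
   __attribute__((optimize ("O0"))) carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION
   node in which the per-function "optimize" level is zero, so opt_for_fn
   returns 0, the predicate above returns true, and IPA-CP leaves the
   function alone.  */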
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
      descriptors[param_num].move_cost = cost;
      /* Watch overflow, move_cost is a bitfield.  */
      gcc_checking_assert (cost == descriptors[param_num].move_cost);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Print the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as this
   array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count, true);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      fprintf (f, "type: ");
	      print_generic_expr (f, item->type);
	      fprintf (f, ", ");
	      if (item->jftype == IPA_JF_PASS_THROUGH)
		fprintf (f, "PASS THROUGH: %d,",
			 item->value.pass_through.formal_id);
	      else if (item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, "LOAD AGG: %d",
			   item->value.pass_through.formal_id);
		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
			   item->value.load_agg.offset,
			   item->value.load_agg.by_ref ? "reference"
						       : "value");
		}

	      if (item->jftype == IPA_JF_PASS_THROUGH
		  || item->jftype == IPA_JF_LOAD_AGG)
		{
		  fprintf (f, " op %s",
			   get_tree_code_name (item->value.pass_through.operation));
		  if (item->value.pass_through.operation != NOP_EXPR)
		    {
		      fprintf (f, " ");
		      print_generic_expr (f, item->value.pass_through.operand);
		    }
		}
	      else if (item->jftype == IPA_JF_CONST)
		{
		  fprintf (f, "CONST: ");
		  print_generic_expr (f, item->value.constant);
		}
	      else if (item->jftype == IPA_JF_UNKNOWN)
		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->type)));
	      fprintf (f, "\n");
	    }
	}

      class ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      class cgraph_indirect_call_info *ii;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	fprintf (f, "       no arg info\n");
      else
	ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_jump_functions (f, node);
}
/* Set JFUNC to be a know-really-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
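/* A hypothetical constructor illustrating the three sections described
   above (an illustrative sketch, names made up):

     B::B (struct B * const this)
     {
       A::A (this);                    <-- section 1: ancestor constructors
       this->_vptr.B = &_ZTV1B + 16;   <-- section 2: VMT pointer store
       this->x = 0;                    <-- section 3: user code, may call
                                           virtual functions
     }
*/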
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type cannot change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   new structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->parm_modified = true;
  return !modified;
}
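/* Note on the budget bookkeeping above: walk_aliased_vdefs returns the
   number of visited definitions, or a negative value once the given limit
   is exhausted, so a negative result conservatively marks the parameter
   as modified and zeroes the remaining alias-oracle budget.  The same
   pattern recurs in parm_ref_data_preserved_p and
   parm_ref_data_pass_through_p below.  */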
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified, in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
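/* For instance (an illustrative sketch, names hypothetical), given

     int foo (struct S *p)
     {
       return p->x;
     }

   the load p->x is a COMPONENT_REF over MEM_REF[p_1(D), 0] and matches the
   MEM_REF branch above: *INDEX_P is set to the index of P, *BY_REF_P to
   true and *OFFSET_P to the bit offset of field x within struct S.  */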
/* If STMT is an assignment that loads a value from a parameter declaration,
   or from an aggregate passed as the parameter either by value or reference,
   return the index of the parameter in ipa_node_params.  Otherwise return -1.

   FBI holds gathered information about the function.  INFO describes
   parameters of the function, STMT is the assignment statement.  If it is a
   memory load from an aggregate, *OFFSET_P is filled with offset within the
   aggregate, and *BY_REF_P specifies whether the aggregate is passed by
   reference.  */

static int
load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
				   class ipa_node_params *info,
				   gimple *stmt,
				   HOST_WIDE_INT *offset_p,
				   bool *by_ref_p)
{
  int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
  poly_int64 size;

  /* Load value from a parameter declaration.  */
  if (index >= 0)
    {
      *offset_p = -1;
      return index;
    }

  if (!gimple_assign_load_p (stmt))
    return -1;

  tree rhs = gimple_assign_rhs1 (stmt);

  /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
  for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
    if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
      return -1;

  /* Skip memory reference containing bit-field.  */
  if (TREE_CODE (rhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (rhs))
    return -1;

  if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
			       offset_p, &size, by_ref_p))
    return -1;

  gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
			 size));
  if (!*by_ref_p)
    {
      tree param_type = ipa_get_type (info, index);

      if (!param_type || !AGGREGATE_TYPE_P (param_type))
	return -1;
    }
  else if (TREE_THIS_VOLATILE (rhs))
    return -1;

  return index;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  class ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    class ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement, and this statement is stored in
   *RHS_STMT.  Otherwise return RHS as it is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
      *rhs_stmt = def_stmt;
    }
  return rhs;
}
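/* E.g. (illustrative) for

     b_2 = a_1(D);
     c_3 = b_2;

   a query for c_3 walks the two single copies and returns a_1(D), with
   *RHS_STMT left pointing at "b_2 = a_1(D)".  */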
/* Simple linked list, describing contents of an aggregate before call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;

  /* Type of the described part of the aggregate.  */
  tree type;

  /* Known constant value or jump function data describing contents.  */
  struct ipa_load_agg_data value;

  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Add an aggregate content item into a linked list of
   ipa_known_agg_contents_list structure, in which all elements
   are sorted ascendingly by offset.  */

static inline void
add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
			  struct ipa_known_agg_contents_list *item)
{
  struct ipa_known_agg_contents_list *list = *plist;

  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	break;

      plist = &list->next;
    }

  item->next = list;
  *plist = item;
}
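/* E.g. (illustrative) inserting items with offsets 8, 0 and 4 in that order
   yields the list 0 -> 4 -> 8; keeping the list sorted lets the clobber
   check below scan it linearly with early exits.  */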
/* Check whether a given aggregate content is clobbered by certain element in
   a linked list of ipa_known_agg_contents_list.  */

static inline bool
clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
				struct ipa_known_agg_contents_list *item)
{
  for (; list; list = list->next)
    {
      if (list->offset >= item->offset)
	return list->offset < item->offset + item->size;

      if (list->offset + list->size > item->offset)
	return true;
    }

  return false;
}
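/* E.g. (illustrative) an ITEM at offset 32 with size 32 is clobbered by a
   LIST entry at offset 48 (48 < 32 + 32) or by an entry at offset 16 with
   size 32 (16 + 32 > 32), but not by an entry at offset 0 with size 16 or
   by entries at offset 64 and beyond.  */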
/* Build aggregate jump function from LIST, assuming there are exactly
   VALUE_COUNT entries there and that offset of the passed argument
   is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int value_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, value_count);
  for (; list; list = list->next)
    {
      struct ipa_agg_jf_item item;
      tree operand = list->value.pass_through.operand;

      if (list->value.pass_through.formal_id >= 0)
	{
	  /* Content value is derived from some formal parameter.  */
	  if (list->value.offset >= 0)
	    item.jftype = IPA_JF_LOAD_AGG;
	  else
	    item.jftype = IPA_JF_PASS_THROUGH;

	  item.value.load_agg = list->value;
	  if (operand)
	    item.value.pass_through.operand
	      = unshare_expr_without_location (operand);
	}
      else if (operand)
	{
	  /* Content value is known constant.  */
	  item.jftype = IPA_JF_CONST;
	  item.value.constant = unshare_expr_without_location (operand);
	}
      else
	continue;

      item.type = list->type;
      gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);

      item.offset = list->offset - arg_offset;
      gcc_assert ((item.offset % BITS_PER_UNIT) == 0);

      jfunc->agg.items->quick_push (item);
    }
}
/* Given an assignment statement STMT, try to collect information into
   AGG_VALUE that will be used to construct jump function for RHS of the
   assignment, from which content value of an aggregate part comes.

   Besides constant and simple pass-through jump functions, also try to
   identify whether it matches the following pattern that can be described by
   a load-value-from-aggregate jump function, which is a derivative of simple
   pass-through jump function.

     foo (int *p)
     {
       ...

       *(q_5 + 4) = *(p_3(D) + 28) op 1;
       bar (q_5);
     }

   Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
   constant, simple pass-through and load-value-from-aggregate.  If the value
   is constant, it will be kept in field OPERAND, and field FORMAL_ID is
   set to -1.  For simple pass-through and load-value-from-aggregate, field
   FORMAL_ID specifies the related formal parameter index, and field
   OFFSET can be used to distinguish them, -1 means simple pass-through,
   otherwise means load-value-from-aggregate.  */

static void
analyze_agg_content_value (struct ipa_func_body_info *fbi,
			   struct ipa_load_agg_data *agg_value,
			   gimple *stmt)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  enum tree_code code;
  int index = -1;

  /* Initialize jump function data for the aggregate part.  */
  memset (agg_value, 0, sizeof (*agg_value));
  agg_value->pass_through.operation = NOP_EXPR;
  agg_value->pass_through.formal_id = -1;
  agg_value->offset = -1;

  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
      || TREE_THIS_VOLATILE (lhs)
      || TREE_CODE (lhs) == BIT_FIELD_REF
      || contains_bitfld_component_ref_p (lhs))
    return;

  /* Skip SSA copies.  */
  while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
	break;

      stmt = SSA_NAME_DEF_STMT (rhs1);
      if (!is_gimple_assign (stmt))
	break;

      rhs1 = gimple_assign_rhs1 (stmt);
    }

  code = gimple_assign_rhs_code (stmt);
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_SINGLE_RHS:
      if (is_gimple_ip_invariant (rhs1))
	{
	  agg_value->pass_through.operand = rhs1;
	  return;
	}
      code = NOP_EXPR;
      break;

    case GIMPLE_UNARY_RHS:
      /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
	 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
	 tcc_binary, this subtleness is somewhat misleading.

	 Since tcc_unary is widely used in IPA-CP code to check an operation
	 with one operand, here we only allow tcc_unary operations to avoid
	 possible problems.  Then we can use (opclass == tcc_unary) or not to
	 distinguish unary and binary.  */
      if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
	return;

      rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
      break;

    case GIMPLE_BINARY_RHS:
      {
	gimple *rhs1_stmt = stmt;
	gimple *rhs2_stmt = stmt;
	tree rhs2 = gimple_assign_rhs2 (stmt);

	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);

	if (is_gimple_ip_invariant (rhs2))
	  {
	    agg_value->pass_through.operand = rhs2;
	    stmt = rhs1_stmt;
	  }
	else if (is_gimple_ip_invariant (rhs1))
	  {
	    if (TREE_CODE_CLASS (code) == tcc_comparison)
	      code = swap_tree_comparison (code);
	    else if (!commutative_tree_code (code))
	      return;

	    agg_value->pass_through.operand = rhs1;
	    stmt = rhs2_stmt;
	    rhs1 = rhs2;
	  }
	else
	  return;

	if (TREE_CODE_CLASS (code) != tcc_comparison
	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
	  return;
      }
      break;

    default:
      return;
    }

  if (TREE_CODE (rhs1) != SSA_NAME)
    index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
					       &agg_value->offset,
					       &agg_value->by_ref);
  else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
    index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));

  if (index >= 0)
    {
      if (agg_value->offset >= 0)
	agg_value->type = TREE_TYPE (rhs1);
      agg_value->pass_through.formal_id = index;
      agg_value->pass_through.operation = code;
    }
  else
    agg_value->pass_through.operand = NULL_TREE;
}
/* If STMT is a memory store to the object whose address is BASE, extract
   information (offset, size, and value) into CONTENT, and return true,
   otherwise we conservatively assume the whole object is modified with
   unknown content, and return false.  CHECK_REF means that access to object
   is expected to be in form of MEM_REF expression.  */

static bool
extract_mem_content (struct ipa_func_body_info *fbi,
		     gimple *stmt, tree base, bool check_ref,
		     struct ipa_known_agg_contents_list *content)
{
  HOST_WIDE_INT lhs_offset, lhs_size;
  bool reverse;

  if (!is_gimple_assign (stmt))
    return false;

  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
					       &reverse);
  if (!lhs_base)
    return false;

  if (check_ref)
    {
      if (TREE_CODE (lhs_base) != MEM_REF
	  || TREE_OPERAND (lhs_base, 0) != base
	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	return false;
    }
  else if (lhs_base != base)
    return false;

  content->offset = lhs_offset;
  content->size = lhs_size;
  content->type = TREE_TYPE (lhs);
  content->next = NULL;

  analyze_agg_content_value (fbi, &content->value, stmt);
  return true;
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constants or values that are derived from caller's
   formal parameters in the way described by some kinds of jump functions.  FBI
   is the context of the caller function for interprocedural analysis.  ARG can
   either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
   the type of the aggregate, JFUNC is the jump function for the aggregate.  */

static void
determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
				 gcall *call, tree arg,
				 tree arg_type,
				 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
  bitmap visited = NULL;
  int item_count = 0, value_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;
  int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);

  if (max_agg_items == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage traverses virtual SSA web backwards starting from the call
     statement, only looks at individual dominating virtual operand (its
     definition dominates the call), as long as it is confident that content
     of the aggregate is affected by definition of the virtual operand, it
     builds a sorted linked list of ipa_agg_jf_list describing that.  */

  for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);

      if (gimple_code (stmt) == GIMPLE_PHI)
	{
	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
					       fbi->aa_walk_budget,
					       &visited, false, NULL, NULL);
	  continue;
	}

      if (stmt_may_clobber_ref_p_1 (stmt, &r))
	{
	  struct ipa_known_agg_contents_list *content
	    = XALLOCA (struct ipa_known_agg_contents_list);

	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
	    break;

	  /* Now we get a dominating virtual operand, and need to check
	     whether its value is clobbered by any other dominating one.  */
	  if ((content->value.pass_through.formal_id >= 0
	       || content->value.pass_through.operand)
	      && !clobber_by_agg_contents_list_p (all_list, content))
	    {
	      struct ipa_known_agg_contents_list *copy
		= XALLOCA (struct ipa_known_agg_contents_list);

	      /* Add to the list consisting of only dominating virtual
		 operands, whose definitions can finally reach the call.  */
	      add_to_agg_contents_list (&list, (*copy = *content, copy));

	      if (++value_count == max_agg_items)
		break;
	    }

	  /* Add to the list consisting of all dominating virtual operands.  */
	  add_to_agg_contents_list (&all_list, content);

	  if (++item_count == 2 * max_agg_items)
	    break;
	}

      dom_vuse = gimple_vuse (stmt);
    }

  BITMAP_FREE (visited);

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any meaningful items to begin with.  */

  if (value_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
    }
}
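/* A sketch of what the three stages recover (illustrative, names
   hypothetical):

     struct S s;
     s.a = 1;            <-- constant at the offset of field a
     s.b = x_3(D) + 2;   <-- arithmetic pass-through of parameter x
     bar (&s);

   would yield an aggregate jump function with two items, provided neither
   store is clobbered by a later dominating store before the call.  */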
/* Return the Ith param type of callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}
/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range *
ipa_get_value_range (value_range *tmp)
{
  value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range *vr = new (ggc_alloc<value_range> ()) value_range;
  *vr = *tmp;
  *slot = vr;

  return vr;
}
/* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range *
ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
{
  value_range tmp (min, max, kind);
  return ipa_get_value_range (&tmp);
}
/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}
/* Assign to JF a pointer to a value_range just like TMP but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  class ipa_node_params *info = IPA_NODE_REF (cs->caller);
  class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num, true);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  class ipa_polymorphic_call_context context (cs->caller->decl,
						      arg, cs->call_stmt,
						      &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind kind;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (kind = get_range_info (arg, &min, &max))
	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
	    {
	      value_range resvr;
	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
				 wide_int_to_tree (TREE_TYPE (arg), max),
				 kind);
	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
				     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
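
/* For illustration (not compiled): given a caller such as

     void caller (int x)
     {
       callee (x, 7, x + 1);
     }

   this function records for the edge to CALLEE a simple pass-through jump
   function for argument 0, a constant jump function for argument 1 and an
   arithmetic pass-through (PLUS_EXPR, 1) for argument 2, together with any
   known bits and value ranges of the arguments.  */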
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee = callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
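
/* Example of a statement the function above matches (illustrative): with a
   formal parameter F of member pointer type, a load such as

     D.1234_2 = f.__pfn;

   returns F and stores the bit position of the __pfn field (or, with
   USE_DELTA, the __delta field) to *OFFSET_P.  */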
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.
   If POLYMORPHIC is true, record it as a destination of a polymorphic call.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt, bool polymorphic)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
				       param_index, true);
  if (cs->indirect_info->polymorphic || polymorphic)
    ipa_set_param_used_by_polymorphic_call
	(IPA_NODE_REF (node), param_index, true);
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  class ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);

      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call, false);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						    call, false);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
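
/* A minimal example of the simple (non-member-pointer) case handled above
   (illustrative only):

     int apply (int (*fn) (int), int x)
     {
       return fn (x);
     }

   Here FN is the SSA default definition of a PARM_DECL, so an indirect
   edge with param_index 0 is noted for later indirect inlining.  */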
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  class ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call))
	return;
    }
  else
    {
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
						call, true);
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
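
/* Illustrative source-level example of what the function above recognizes:

     int f (A *obj) { return obj->virt_method (); }

   The OBJ_TYPE_REF_OBJECT of the call target is the default definition of
   parameter OBJ, so a polymorphic call note carrying the ancestor offset,
   OTR token and OTR type is attached to the indirect edge.  */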
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  class ipa_node_params *info = (class ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  class ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
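
/* Worked example (illustrative): in

     static void f (void (*cb) (void)) { cb (); helper (cb); }

   the default definition of CB has two non-debug uses and both uses appear
   in call statements, so CONTROLLED_USES becomes 2.  Had CB also been
   stored to memory or otherwise escaped through a non-call statement, the
   count would degrade to IPA_UNDESCRIBED_USE.  */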
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  class ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF_GET_CREATE (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  class ipa_edge_args *top = IPA_EDGE_REF (cs);
  class ipa_edge_args *args = IPA_EDGE_REF (e);
  if (!args)
    return;
  int i;
  int count = ipa_get_cs_argument_count (args);

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      class ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
	    {
	      int dst_fid;
	      struct ipa_jump_func *src;

	      if (item->jftype != IPA_JF_PASS_THROUGH
		  && item->jftype != IPA_JF_LOAD_AGG)
		continue;

	      dst_fid = item->value.pass_through.formal_id;
	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
		{
		  item->jftype = IPA_JF_UNKNOWN;
		  continue;
		}

	      item->value.pass_through.formal_id = -1;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      if (src->type == IPA_JF_CONST)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      && item->value.pass_through.operation == NOP_EXPR)
		    {
		      item->jftype = IPA_JF_CONST;
		      item->value.constant = src->value.constant.value;
		      continue;
		    }
		}
	      else if (src->type == IPA_JF_PASS_THROUGH
		       && src->value.pass_through.operation == NOP_EXPR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH
		      || !item->value.load_agg.by_ref
		      || src->value.pass_through.agg_preserved)
		    item->value.pass_through.formal_id
		      = src->value.pass_through.formal_id;
		}
	      else if (src->type == IPA_JF_ANCESTOR)
		{
		  if (item->jftype == IPA_JF_PASS_THROUGH)
		    {
		      if (!src->value.ancestor.offset)
			item->value.pass_through.formal_id
			  = src->value.ancestor.formal_id;
		    }
		  else if (src->value.ancestor.agg_preserved)
		    {
		      gcc_checking_assert (item->value.load_agg.by_ref);

		      item->value.pass_through.formal_id
			= src->value.ancestor.formal_id;
		      item->value.load_agg.offset
			+= src->value.ancestor.offset;
		    }
		}

	      if (item->value.pass_through.formal_id < 0)
		item->jftype = IPA_JF_UNKNOWN;
	    }
	}

      if (!top)
	{
	  ipa_set_jf_unknown (dst);
	  continue;
	}

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  class ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      class ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count, true);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  /* Parameter and argument in ancestor jump function must be pointer
	     type, which means access to aggregate must be by-reference.  */
	  gcc_assert (!src->agg.items || src->agg.by_ref);

	  if (src->agg.items && dst->value.ancestor.agg_preserved)
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (top && dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      class ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  class ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count, true);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id,
						     operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
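
/* Illustrative composition example: suppose B has been inlined into A, B
   contained a call C (x) whose argument had a simple pass-through jump
   function referring to B's parameter, and A's call to B passed the
   constant 5 in that position.  After the update above, the A->C edge
   carries a constant jump function 5 instead of the pass-through; chains
   of pass-throughs likewise compose into pass-throughs of A's own
   parameters.  */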
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      if (dump_file)
	{
	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
	  if (!e2)
	    fprintf (dump_file, "ipa-prop: Discovered call to a "
		     "speculative target (%s -> %s) but the call is "
		     "already speculated to a different target.  "
		     "Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	  else
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s -> %s); this agrees with previous speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->dump_name (), callee->dump_name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = cgraph_edge::make_direct (ie, callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor,
				     HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ??? Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
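
/* Illustrative example of the lookup above: with

     struct S { int a; void (*fn) (void); };
     static const struct S s = { 1, callback };

   a by-reference lookup of &s at the bit offset of the FN field walks the
   CONSTRUCTOR that is DECL_INITIAL of S and yields the address of
   CALLBACK, which later enables turning an indirect call through
   s.fn into a direct one.  */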
/* Retrieve value from AGG, a set of known offset/value for an aggregate or
   static initializer of SCALAR (which can be NULL) for the given OFFSET or
   return NULL if there is none.  BY_REF specifies whether the value has to be
   passed by reference or by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then
   the boolean it points to is set to true if the value comes from an
   initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_value *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol,
			    struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
	     origin->caller->dump_name (), symbol->dump_name ());
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
   the type of the parameter to which the result of JFUNC is passed.  If it can
   be determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
   relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc, tree target_type,
				  struct cgraph_node *new_root,
				  class ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
  if (agg_contents)
    {
      bool from_global_constant;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      target = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      agg.release ();
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s->%s\n",
		 ie->caller->dump_name (),
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s\n",
		 ie->caller->dump_name ());
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  NEW_ROOT and
   NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
   to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   class ipa_polymorphic_call_context ctx,
				   struct cgraph_node *new_root,
				   class ipa_node_params *new_root_info)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
							    new_root,
							    &jfunc->agg);
      tree t = ipa_find_agg_cst_for_param (&agg, scalar,
					   ie->indirect_info->offset,
					   true);
      agg.release ();
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
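
/* Illustrative scenario for the function above: if IPA-CP established that
   the argument is an object of a type T with no derived types in the unit
   (FINAL is true), possible_polymorphic_call_targets returns the single
   method T::foo and the call is made direct; if the type is only the most
   likely one, the edge is instead made speculative and later either
   confirmed or undone.  */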
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  class ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct cgraph_node *new_root;
  class ipa_node_params *new_root_info, *inlined_node_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root = cs->caller->inlined_to
		? cs->caller->inlined_to : cs->caller;
  new_root_info = IPA_NODE_REF (new_root);
  inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      class cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);
      cgraph_node *spec_target = NULL;

      /* FIXME: This may need updating for multiple calls.  */
      if (ie->speculative)
	spec_target = ie->first_speculative_call_target ()->callee;

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
							       new_root,
							       new_root_info);
	}
      else
	{
	  tree target_type = ipa_get_type (inlined_node_info, param_index);
	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							      target_type,
							      new_root,
							      new_root_info);
	}

      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	      ipa_set_param_used_by_indirect_call (new_root_info,
						   ici->param_index, true);
	      if (ici->polymorphic)
		ipa_set_param_used_by_polymorphic_call (new_root_info,
							ici->param_index,
							true);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
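
/* Illustrative effect of the function above: after inlining

     static void run (void (*fn) (void)) { fn (); }
     void f (void) { run (g); }

   into F, the indirect edge for FN () sees a constant jump function &g on
   the top edge, so try_make_edge_direct_simple_call turns it into a direct
   (and thus potentially inlinable) call to G.  */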
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
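
/* Worked example of the formula above: if a parameter with C = 2 described
   uses is passed on at a call site to a callee parameter with D = 3
   described uses, the passing call itself accounts for one use in the
   caller, and that use disappears when the call is inlined, so the combined
   count is 2 + 3 - 1 = 4.  If either count is IPA_UNDESCRIBED_USE, nothing
   reliable can be said about the combination.  */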
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (!args)
    return;
  struct cgraph_node *new_root = cs->caller->inlined_to
    ? cs->caller->inlined_to : cs->caller;
  class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  if (!old_root_info)
    return;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->inlined_to
			 && clone->ipcp_clone
			 && clone != rdesc->cs->caller)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
  ipa_node_params_sum->remove (cs->callee);

  class ipa_edge_args *args = IPA_EDGE_REF (cs);
  if (args)
    {
      bool ok = true;
      if (args->jump_functions)
	{
	  struct ipa_jump_func *jf;
	  int i;
	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	    if (jf->type == IPA_JF_CONST
		&& ipa_get_jf_constant_rdesc (jf))
	      {
		ok = false;
		break;
	      }
	}
      if (ok)
	ipa_edge_args_sum->remove (cs);
    }
  if (ipcp_transformation_sum)
    ipcp_transformation_sum->remove (cs->callee);

  return changed;
}
/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocate it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}
/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ggc_delete (ipa_edge_args_sum);
  ipa_edge_args_sum = NULL;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ggc_delete (ipa_node_params_sum);
  ipa_node_params_sum = NULL;
}
/* Initialize IPA CP transformation summary and also allocate any necessary
   hash tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
}
/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
  ggc_free (ipcp_transformation_sum);
  ipcp_transformation_sum = NULL;
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}
/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args,
				ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->inlined_to
		    ? dst_rdesc->cs->caller->inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->inlined_to
	    ? dst->caller->inlined_to : dst->caller;
	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c--;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }
}
/* Duplication of ipcp transformation summaries.  */

void
ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
				 ipcp_transformation *src_trans,
				 ipcp_transformation *dst_trans)
{
  /* Avoid redundant work of duplicating vectors we will never use.  */
  if (dst->inlined_to)
    return;
  dst_trans->bits = vec_safe_copy (src_trans->bits);
  dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
  ipa_agg_replacement_value *agg = src_trans->agg_values,
    **aggptr = &dst_trans->agg_values;
  while (agg)
    {
      *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
      **aggptr = *agg;
      agg = agg->next;
      aggptr = &(*aggptr)->next;
    }
  *aggptr = NULL;
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  class ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  if (!info)
    {
      fprintf (f, " no params return\n");
      return;
    }
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      if (ipa_is_param_used_by_ipa_predicates (info, i))
	fprintf (f, " used_by_ipa_predicates");
      if (ipa_is_param_used_by_indirect_call (info, i))
	fprintf (f, " used_by_indirect_call");
      if (ipa_is_param_used_by_polymorphic_call (info, i))
	fprintf (f, " used_by_polymorphic_call");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      stream_write_tree (ob, item->type, true);
      streamer_write_uhwi (ob, item->offset);
      streamer_write_uhwi (ob, item->jftype);
      switch (item->jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  stream_write_tree (ob, item->value.constant, true);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  streamer_write_uhwi (ob, item->value.pass_through.operation);
	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
	      != tcc_unary)
	    stream_write_tree (ob, item->value.pass_through.operand, true);
	  if (item->jftype == IPA_JF_LOAD_AGG)
	    {
	      stream_write_tree (ob, item->value.load_agg.type, true);
	      streamer_write_uhwi (ob, item->value.load_agg.offset);
	      bp = bitpack_create (ob->main_stream);
	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
	      streamer_write_bitpack (&bp);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (class lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			class data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
	ipa_set_jf_constant (jump_func, t, cs);
	break;
      }
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.type = stream_read_tree (ib, data_in);
      item.offset = streamer_read_uhwi (ib);
      item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);

      switch (item.jftype)
	{
	case IPA_JF_UNKNOWN:
	  break;
	case IPA_JF_CONST:
	  item.value.constant = stream_read_tree (ib, data_in);
	  break;
	case IPA_JF_PASS_THROUGH:
	case IPA_JF_LOAD_AGG:
	  operation = (enum tree_code) streamer_read_uhwi (ib);
	  item.value.pass_through.operation = operation;
	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
	  if (TREE_CODE_CLASS (operation) == tcc_unary)
	    item.value.pass_through.operand = NULL_TREE;
	  else
	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
	  if (item.jftype == IPA_JF_LOAD_AGG)
	    {
	      struct bitpack_d bp;
	      item.value.load_agg.type = stream_read_tree (ib, data_in);
	      item.value.load_agg.offset = streamer_read_uhwi (ib);
	      bp = streamer_read_bitpack (ib);
	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
	    }
	  break;
	default:
	  fatal_error (UNKNOWN_LOCATION,
		       "invalid jump function in LTO stream");
	}
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (class lto_input_block *ib,
			     class data_in *data_in,
			     struct cgraph_edge *cs,
			     class ipa_node_params *info)
{
  class cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
  if (info && ii->param_index >= 0)
    {
      if (ii->polymorphic)
	ipa_set_param_used_by_polymorphic_call (info,
						ii->param_index, true);
      ipa_set_param_used_by_indirect_call (info,
					   ii->param_index, true);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  class ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);

      if (!args)
	{
	  streamer_write_uhwi (ob, 0);
	  continue;
	}

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      class ipa_edge_args *args = IPA_EDGE_REF (e);
      if (!args)
	streamer_write_uhwi (ob, 0);
      else
	{
	  streamer_write_uhwi (ob,
			       ipa_get_cs_argument_count (args) * 2
			       + (args->polymorphic_call_contexts != NULL));
	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	    {
	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	      if (args->polymorphic_call_contexts != NULL)
		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	    }
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (class lto_input_block *ib,
		    class data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
      vec_safe_grow_cleared (args->jump_functions, count, true);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      class ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
		    class data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  class ipa_node_params *info = prevails
				? IPA_NODE_REF_GET_CREATE (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e, info);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
					&len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* Stream out the aggregate value replacement chain for NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count, true);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count, true);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  class data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
					&len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;

  if (!node->clone.param_adjustments)
    return;

  auto_vec<int, 16> new_indices;
  node->clone.param_adjustments->get_updated_indices (&new_indices);
  for (v = aggval; v; v = v->next)
    {
      gcc_checking_assert (v->index >= 0);

      if ((unsigned) v->index < new_indices.length ())
	v->index = new_indices[v->index];
      else
	/* This can happen if we know about a constant passed by reference by
	   an argument which is never actually used for anything, let alone
	   loading that constant.  */
	v->index = -1;
    }
}
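/* Hypothetical example (editor's, not in the original): if a clone dropped
   the second of three parameters, get_updated_indices would yield
   { 0, -1, 1 }; a replacement recorded for former parameter 2 is remapped
   to index 1, while one recorded for the dropped parameter 1 is marked
   unusable with index -1.  */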
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset;
      poly_int64 size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
		       size))
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
/* Return true if we have recorded VALUE and MASK about PARM.
   Set VALUE and MASK accordingly.  */

bool
ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
{
  cgraph_node *cnode = cgraph_node::get (current_function_decl);
  ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
  if (!ts || vec_safe_length (ts->bits) == 0)
    return false;

  int i = 0;
  for (tree p = DECL_ARGUMENTS (current_function_decl);
       p != parm; p = DECL_CHAIN (p))
    {
      i++;
      /* Ignore static chain.  */
      if (!p)
	return false;
    }

  if (cnode->clone.param_adjustments)
    {
      i = cnode->clone.param_adjustments->get_original_index (i);
      if (i < 0)
	return false;
    }

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  if (!bits[i])
    return false;
  *mask = bits[i]->mask;
  *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
  return true;
}
/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;
  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  int idx = new_indices[i];
	  if (idx < 0)
	    continue;
	  parm = parm_decls[idx];
	}
      else
	parm = parm_decls[i];
      gcc_checking_assert (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
bool
ipa_vr::nonzero_p (tree expr_type) const
{
  if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
    return true;

  unsigned prec = TYPE_PRECISION (expr_type);
  return (type == VR_RANGE
	  && TYPE_UNSIGNED (expr_type)
	  && wi::eq_p (min, wi::one (prec))
	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
}
/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();
  if (!count)
    return;

  auto_vec<int, 16> new_indices;
  bool need_remapping = false;
  if (node->clone.param_adjustments)
    {
      node->clone.param_adjustments->get_updated_indices (&new_indices);
      need_remapping = true;
    }
  auto_vec <tree, 16> parm_decls;
  push_function_arg_decls (&parm_decls, node->decl);

  for (unsigned i = 0; i < count; ++i)
    {
      tree parm;
      int remapped_idx;
      if (need_remapping)
	{
	  if (i >= new_indices.length ())
	    continue;
	  remapped_idx = new_indices[i];
	  if (remapped_idx < 0)
	    continue;
	}
      else
	remapped_idx = i;

      parm = parm_decls[remapped_idx];

      gcc_checking_assert (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u "
			   "(now %i) ", i, remapped_idx);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
  fbi.param_count = param_count;
  fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);

  vec_safe_grow_cleared (descriptors, param_count, true);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}
/* Return true if OTHER describes the same agg value.  */
bool
ipa_agg_value::equal_to (const ipa_agg_value &other)
{
  return offset == other.offset
	 && operand_equal_p (value, other.value, 0);
}

#include "gt-ipa-prop.h"