/* Interprocedural analyses.
   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to FILE.
   Note there is no setter function as the descriptor array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type, 0);
    }
}
/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->bits.known)
	{
	  fprintf (f, "         value: "); print_hex (jump_func->bits.value, f);
	  fprintf (f, ", mask: "); print_hex (jump_func->bits.mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->vr_known)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr.type == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (jump_func->m_vr.min, f);
	  fprintf (f, ", ");
	  print_decs (jump_func->m_vr.max, f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-really-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits.known = false;
  jfunc->vr_known = false;
}
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jmp function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
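
/* For illustration, a hypothetical C++ constructor exhibiting the three
   phases the comment above relies on:

     struct D : B
     {
       D () : B ()     // 1) ancestor constructors run first
       {		// 2) then the VMT pointers of this and its bases
			//    are stored, giving *this dynamic type D
	 user_code (); // 3) only now may virtual calls happen
       }
     };
*/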
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       D.1845_2->mutate();
     }

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      jfunc->vr_known = true;
	      jfunc->m_vr.type = VR_ANTI_RANGE;
	      jfunc->m_vr.min = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.max = build_int_cst (TREE_TYPE (arg), 0);
	      jfunc->m_vr.equiv = NULL;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}
      else
	{
	  wide_int min, max;
	  value_range_type type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range vr;

	      vr.type = type;
	      vr.min = wide_int_to_tree (TREE_TYPE (arg), min);
	      vr.max = wide_int_to_tree (TREE_TYPE (arg), max);
	      vr.equiv = NULL;
	      extract_range_from_unary_expr (&jfunc->m_vr,
					     NOP_EXPR,
					     param_type,
					     &vr, TREE_TYPE (arg));
	      if (jfunc->m_vr.type == VR_RANGE
		  || jfunc->m_vr.type == VR_ANTI_RANGE)
		jfunc->vr_known = true;
	      else
		jfunc->vr_known = false;
	    }
	  else
	    gcc_assert (!jfunc->vr_known);
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  jfunc->bits.known = true;

	  if (TREE_CODE (arg) == SSA_NAME)
	    {
	      jfunc->bits.value = 0;
	      jfunc->bits.mask = widest_int::from (get_nonzero_bits (arg),
						   TYPE_SIGN (TREE_TYPE (arg)));
	    }
	  else
	    {
	      jfunc->bits.value = wi::to_widest (arg);
	      jfunc->bits.mask = 0;
	    }
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  jfunc->bits.known = true;
	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  jfunc->bits.mask
	    = wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false)
	      .and_not (align / BITS_PER_UNIT - 1);
	  jfunc->bits.value = bitpos / BITS_PER_UNIT;
	}
      else
	gcc_assert (!jfunc->bits.known);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
                                tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);

      if (index >= 0)
        ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
                                 gimple_assign_rhs1 (def), &index, &offset,
                                 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
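
/* Editorial illustration (a sketch added while editing, not part of the
   original sources): the member pointer pattern matched above typically
   looks roughly like this in GIMPLE, where f$__pfn is the pfn component of
   the member pointer parameter F:

     <bb 2>:
       _1 = f$__pfn & 1;
       if (_1 != 0)
         goto <bb 3>;        // virtual: look the target up in the vtable
       else
         goto <bb 4>;        // direct: f$__pfn is the function itself

     <bb 3>:
       _2 = ...vtable load based on f$__pfn...;

     <bb 4>:
       iftmp = PHI <f$__pfn (2), _2 (3)>;
       iftmp (&S, 4);

   The code above recognizes the two-argument PHI, the layout of the three
   basic blocks, and the branch on the least significant bit of the pfn.  */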

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
                               gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
                                  call, &jfunc))
        return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
        return;
      index = ipa_get_param_decl_index (info,
                                        SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
                              call, &jfunc, anc_offset))
        return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
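
/* Editorial illustration (not from the original sources): a C++ virtual
   call such as

     int foo (A *a) { return a->method (); }

   appears in GIMPLE roughly as

     _1 = a->_vptr.A;
     _2 = _1 + 8;
     _3 = *_2;
     _4 = OBJ_TYPE_REF (_3; a->1) (a);

   Here OBJ_TYPE_REF_OBJECT is the SSA name of A and OBJ_TYPE_REF_TOKEN is
   the vtable slot index 1, which is what the function above records in the
   indirect call info when A is a formal parameter.  */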

/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}

/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}

/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    if (!is_gimple_debug (USE_STMT (use_p)))
                      {
                        controlled_uses = IPA_UNDESCRIBED_USE;
                        break;
                      }
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
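
/* Editorial note (added while editing): "controlled uses" counts how many
   times a parameter is used directly as a call argument and nowhere else.
   For example, in

     static void wrapper (void (*cb) (void)) { helper (cb); }

   parameter CB has one controlled use; if CB also escaped into memory or
   arithmetic, the count would be IPA_UNDESCRIBED_USE instead.  */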

/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}

/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
        {
          ipa_set_param_used (info, i, true);
          ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
        }
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}

/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
        = ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
          struct ipa_polymorphic_call_context *src_ctx
            = ipa_get_ith_polymorhic_call_context (top, dst_fid);

          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
              ipa_set_jf_unknown (dst);
              continue;
            }

          src = ipa_get_ith_jump_func (top, dst_fid);

          if (src_ctx && !src_ctx->useless_p ())
            {
              struct ipa_polymorphic_call_context ctx = *src_ctx;

              /* TODO: Make type preserved safe WRT contexts.  */
              if (!ipa_get_jf_ancestor_type_preserved (dst))
                ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
              ctx.offset_by (dst->value.ancestor.offset);
              if (!ctx.useless_p ())
                {
                  if (!dst_ctx)
                    {
                      vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                             count);
                      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                    }

                  dst_ctx->combine_with (ctx);
                }
            }

          if (src->agg.items
              && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
            {
              struct ipa_agg_jf_item *item;
              int j;

              /* Currently we do not produce clobber aggregate jump functions,
                 replace with merging when we do.  */
              gcc_assert (!dst->agg.items);

              dst->agg.items = vec_safe_copy (src->agg.items);
              dst->agg.by_ref = src->agg.by_ref;
              FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
                item->offset -= dst->value.ancestor.offset;
            }

          if (src->type == IPA_JF_PASS_THROUGH
              && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
            }
          else if (src->type == IPA_JF_PASS_THROUGH
                   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
            {
              dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved = false;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
              dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
            }
          else
            ipa_set_jf_unknown (dst);
        }
      else if (dst->type == IPA_JF_PASS_THROUGH)
        {
          struct ipa_jump_func *src;
          /* We must check range due to calls with variable number of arguments
             and we cannot combine jump functions with operations.  */
          if (dst->value.pass_through.operation == NOP_EXPR
              && (dst->value.pass_through.formal_id
                  < ipa_get_cs_argument_count (top)))
            {
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
              struct ipa_polymorphic_call_context *src_ctx
                = ipa_get_ith_polymorhic_call_context (top, dst_fid);

              if (src_ctx && !src_ctx->useless_p ())
                {
                  struct ipa_polymorphic_call_context ctx = *src_ctx;

                  /* TODO: Make type preserved safe WRT contexts.  */
                  if (!ipa_get_jf_pass_through_type_preserved (dst))
                    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
                  if (!ctx.useless_p ())
                    {
                      if (!dst_ctx)
                        {
                          vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                                 count);
                          dst_ctx
                            = ipa_get_ith_polymorhic_call_context (args, i);
                        }
                      dst_ctx->combine_with (ctx);
                    }
                }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
                  ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  ipa_set_jf_cst_copy (dst, src);
                  break;

                case IPA_JF_PASS_THROUGH:
                  {
                    int formal_id = ipa_get_jf_pass_through_formal_id (src);
                    enum tree_code operation;
                    operation = ipa_get_jf_pass_through_operation (src);

                    if (operation == NOP_EXPR)
                      {
                        bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
                        ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else if (TREE_CODE_CLASS (operation) == tcc_unary)
                      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
                    else
                      {
                        tree operand = ipa_get_jf_pass_through_operand (src);
                        ipa_set_jf_arith_pass_through (dst, formal_id, operand,
                                                       operation);
                      }
                    break;
                  }
                case IPA_JF_ANCESTOR:
                  {
                    bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
                                         ipa_get_jf_ancestor_formal_id (src),
                                         agg_p);
                    break;
                  }
                default:
                  gcc_unreachable ();
                }

              if (src->agg.items
                  && (dst_agg_p || !src->agg.by_ref))
                {
                  /* Currently we do not produce clobber aggregate jump
                     functions, replace with merging when we do.  */
                  gcc_assert (!dst->agg.items);

                  dst->agg.by_ref = src->agg.by_ref;
                  dst->agg.items = vec_safe_copy (src->agg.items);
                }
            }
          else
            ipa_set_jf_unknown (dst);
        }
    }
}
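
/* Editorial illustration of the composition performed above (not from the
   original sources): if edge E carries a pass-through jump function saying
   "argument 0 of E is parameter 2 of E->caller" and the inlined edge CS
   passes a constant 7 in that position, the updated jump function of E
   becomes the constant 7.  Likewise, an ancestor function composed with
   another ancestor function simply adds the two offsets together.  */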

/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
                                bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
          /* Member pointer call that goes through a VMT lookup.  */
          if (ie->indirect_info->member_ptr
              /* Or if target is not an invariant expression and we do not
                 know if it will evaluate to function at runtime.
                 This can happen when folding through &VAR, where &VAR
                 is IP invariant, but VAR itself is not.

                 TODO: Revisit this when GCC 5 is branched.  It seems that
                 member_ptr check is not needed and that we may try to fold
                 the expression and see if VAR is readonly.  */
              || !is_gimple_ip_invariant (target))
            {
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (ie->call_stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "discovered direct call non-invariant "
                                   "%s/%i\n",
                                   ie->caller->name (), ie->caller->order);
                }
              return NULL;
            }

          if (dump_enabled_p ())
            {
              location_t loc = gimple_location_safe (ie->call_stmt);
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                               "discovered direct call to non-function in %s/%i, "
                               "making it __builtin_unreachable\n",
                               ie->caller->name (), ie->caller->order);
            }

          target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
          callee = cgraph_node::get_create (target);
          unreachable = true;
        }
      else
        callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We had better make sure we can refer to it.
         In the case of static functions we are out of luck, since we already
         removed its body.  In the case of public functions we may or may
         not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
          || !TREE_PUBLIC (target))
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (ie->callee->name ()),
                     ie->callee->order);
          return NULL;
        }
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
          != callee->ultimate_alias_target ())
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s/%i -> %s/%i) but the call is already "
                     "speculated to %s/%i.  Giving up.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (callee->name ()),
                     callee->order,
                     xstrdup_for_dump (e2->callee->name ()),
                     e2->callee->order);
        }
      else
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s/%i -> %s/%i); this agrees with previous "
                     "speculation.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (callee->name ()),
                     callee->order);
        }
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s/%i -> %s/%i), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
               speculative ? "speculative" : "known",
               xstrdup_for_dump (ie->caller->name ()),
               ie->caller->order,
               xstrdup_for_dump (callee->name ()),
               callee->order);
      if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                       "converting indirect call in %s to direct call to %s\n",
                       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
         for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
        {
          es = inline_edge_summary (ie);
          es->call_stmt_size -= (eni_size_weights.indirect_call_cost
                                 - eni_size_weights.call_cost);
          es->call_stmt_time -= (eni_time_weights.indirect_call_cost
                                 - eni_time_weights.call_cost);
        }
    }
  else
    {
      if (!callee->can_be_discarded_p ())
        {
          cgraph_node *alias;
          alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
          if (alias)
            callee = alias;
        }
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
             (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}

/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
        {
          offset_int off;
          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

          if (index)
            {
              off = wi::to_offset (index);
              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
                {
                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
                  off = wi::sext (off - wi::to_offset (low_bound),
                                  TYPE_PRECISION (TREE_TYPE (index)));
                }
              off *= wi::to_offset (unit_size);
            }
          else
            off = wi::to_offset (unit_size) * ix;

          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
            continue;
          elt_offset = off.to_shwi ();
        }
      else if (TREE_CODE (type) == RECORD_TYPE)
        {
          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
          if (DECL_BIT_FIELD (index))
            continue;
          elt_offset = int_bit_position (index);
        }
      else
        gcc_unreachable ();

      if (elt_offset > req_offset)
        return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
        return find_constructor_constant_at_offset (val,
                                                    req_offset - elt_offset);

      if (elt_offset == req_offset
          && is_gimple_reg_type (TREE_TYPE (val))
          && is_gimple_ip_invariant (val))
        return val;
    }
  return NULL;
}
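
/* Editorial illustration (not from the original sources): given

     static const int table[4] = {10, 20, 30, 40};

   calling find_constructor_constant_at_offset on DECL_INITIAL of table with
   a REQ_OFFSET of 64 (bits) returns the INTEGER_CST 30 on a target with
   32-bit int, since 64 / 32 selects element number 2.  */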

/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
        return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}

/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
                            HOST_WIDE_INT offset, bool by_ref,
                            bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
        {
          if (from_global_constant)
            *from_global_constant = true;
          return res;
        }
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
        /* Currently we do not have clobber values, return NULL for them once
           we do.  */
        gcc_checking_assert (is_gimple_ip_invariant (item->value));
        if (from_global_constant)
          *from_global_constant = false;
        return item->value;
      }
  return NULL;
}

/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
                                           origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
             xstrdup_for_dump (origin->caller->name ()),
             origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}

/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}

/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
        return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
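
/* Editorial note (added while editing): reference descriptions let IPA-CP
   track how many IPA structures still mention a function whose address was
   passed around as a constant.  Once the count drops to zero, e.g. after
   every call through the pointer has been turned into a direct call, the
   corresponding IPA_REF in the caller can be removed.  */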

/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
                                  struct ipa_jump_func *jfunc,
                                  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
                                           ie->indirect_info->offset,
                                           ie->indirect_info->by_ref,
                                           &from_global_constant);
      if (target
          && !from_global_constant
          && !ie->indirect_info->guaranteed_unmodified)
        return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
                           && (cs != ie
                               || jfunc->type != IPA_JF_CONST
                               || !cgraph_node_for_jfunc (jfunc)
                               || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}

/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
        fprintf (dump_file,
                 "Type inconsistent devirtualization: %s/%i->%s\n",
                 ie->caller->name (), ie->caller->order,
                 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
        fprintf (dump_file,
                 "No devirtualization target in %s/%i\n",
                 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}

/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
                                   struct ipa_jump_func *jfunc,
                                   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
        : NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
                                           ie->indirect_info->offset,
                                           true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
        {
          bool can_refer;
          t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
                                                 vtable, offset, &can_refer);
          if (can_refer)
            {
              if (!t
                  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
                      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
                  || !possible_polymorphic_call_target_p
                       (ie, cgraph_node::get (t)))
                {
                  /* Do not speculate builtin_unreachable, it is stupid!  */
                  if (!ie->indirect_info->vptr_changed)
                    target = ipa_impossible_devirt_target (ie, target);
                  else
                    target = NULL;
                }
              else
                {
                  target = t;
                  speculative = ie->indirect_info->vptr_changed;
                }
            }
        }
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
                                      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
        target = targets[0]->decl;
      else
        target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
           && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
           && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
                                            ie->indirect_info->otr_token,
                                            ie->indirect_info->context);
      if (n)
        {
          target = n->decl;
          speculative = true;
        }
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
          (ie, cgraph_node::get_create (target)))
        {
          if (speculative)
            return NULL;
          target = ipa_impossible_devirt_target (ie, target);
        }
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}

/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_node *node,
                                      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
                                ? cs->caller->global.inlined_to
                                : cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
        continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
        {
          ici->param_index = -1;
          continue;
        }

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
        {
          struct cgraph_edge *de;
          struct ipa_ref *ref;
          ie->speculative_call_info (de, ie, ref);
          spec_target = de->callee;
        }

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
        new_direct_edge = NULL;
      else if (ici->polymorphic)
        {
          ipa_polymorphic_call_context ctx;
          ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
          new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
        }
      else
        new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
                                                            new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
          && new_direct_edge->callee == spec_target)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          top = IPA_EDGE_REF (cs);
          res = true;
          if (!new_direct_edge->speculative)
            continue;
        }
      else if (new_direct_edge)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          if (new_direct_edge->call_stmt)
            new_direct_edge->call_stmt_cannot_inline_p
              = !gimple_check_call_matching_types (
                  new_direct_edge->call_stmt,
                  new_direct_edge->callee->decl, false);
          if (new_edges)
            {
              new_edges->safe_push (new_direct_edge);
              res = true;
            }
          top = IPA_EDGE_REF (cs);
          /* If speculative edge was introduced we still need to update
             call info of the indirect edge.  */
          if (!new_direct_edge->speculative)
            continue;
        }
      if (jfunc->type == IPA_JF_PASS_THROUGH
          && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_pass_through_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_pass_through_type_preserved (jfunc))
                ici->vptr_changed = true;
            }
        }
      else if (jfunc->type == IPA_JF_ANCESTOR)
        {
          if (ici->agg_contents
              && !ipa_get_jf_ancestor_agg_preserved (jfunc)
              && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
              ici->offset += ipa_get_jf_ancestor_offset (jfunc);
              if (ici->polymorphic
                  && !ipa_get_jf_ancestor_type_preserved (jfunc))
                ici->vptr_changed = true;
            }
        }
      else
        /* Either we can find a destination for this edge now or never.  */
        ici->param_index = -1;
    }

  return res;
}

/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
                                   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
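
/* Editorial note (added while editing): the -1 accounts for the call
   statement that disappears when the edge is inlined.  E.g. combining
   c == 1 (one use in the caller, namely the inlined call itself) with
   d == 2 (two uses in the callee) yields 1 + 2 - 1 == 2 remaining
   controlled uses.  */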

/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
               ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
        {
          int src_idx, c, d;
          src_idx = ipa_get_jf_pass_through_formal_id (jf);
          c = ipa_get_controlled_uses (new_root_info, src_idx);
          d = ipa_get_controlled_uses (old_root_info, i);

          gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
                               == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
          c = combine_controlled_uses_counters (c, d);
          ipa_set_controlled_uses (new_root_info, src_idx, c);
          if (c == 0 && new_root_info->ipcp_orig_node)
            {
              struct cgraph_node *n;
              struct ipa_ref *ref;
              tree t = new_root_info->known_csts[src_idx];

              if (t && TREE_CODE (t) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
                  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
                  && (ref = new_root->find_reference (n, NULL, 0)))
                {
                  if (dump_file)
                    fprintf (dump_file, "ipa-prop: Removing cloning-created "
                             "reference from %s/%i to %s/%i.\n",
                             xstrdup_for_dump (new_root->name ()),
                             new_root->order,
                             xstrdup_for_dump (n->name ()), n->order);
                  ref->remove_reference ();
                }
            }
        }
      else if (jf->type == IPA_JF_CONST
               && (rdesc = jfunc_rdesc_usable (jf)))
        {
          int d = ipa_get_controlled_uses (old_root_info, i);
          int c = rdesc->refcount;
          rdesc->refcount = combine_controlled_uses_counters (c, d);
          if (rdesc->refcount == 0)
            {
              tree cst = ipa_get_jf_constant (jf);
              struct cgraph_node *n;
              gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
                                   && TREE_CODE (TREE_OPERAND (cst, 0))
                                      == FUNCTION_DECL);
              n = cgraph_node::get (TREE_OPERAND (cst, 0));
              if (n)
                {
                  struct cgraph_node *clone;
                  bool ok;
                  ok = remove_described_reference (n, rdesc);
                  gcc_checking_assert (ok);

                  clone = cs->caller;
                  while (clone->global.inlined_to
                         && clone != rdesc->cs->caller
                         && IPA_NODE_REF (clone)->ipcp_orig_node)
                    {
                      struct ipa_ref *ref;
                      ref = clone->find_reference (n, NULL, 0);
                      if (ref)
                        {
                          if (dump_file)
                            fprintf (dump_file, "ipa-prop: Removing "
                                     "cloning-created reference "
                                     "from %s/%i to %s/%i.\n",
                                     xstrdup_for_dump (clone->name ()),
                                     clone->order,
                                     xstrdup_for_dump (n->name ()),
                                     n->order);
                          ref->remove_reference ();
                        }
                      clone = clone->callers->caller;
                    }
                }
            }
        }
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
          if (rdesc)
            rdesc->refcount = IPA_UNDESCRIBED_USE;
        }
      else if (jf->type == IPA_JF_PASS_THROUGH)
        ipa_set_controlled_uses (new_root_info,
                                 jf->value.pass_through.formal_id,
                                 IPA_UNDESCRIBED_USE);
    }
}

/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}

/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ipa_node_params_sum->release ();
  ipa_node_params_sum = NULL;
}

/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}

/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
        {
          struct ipa_cst_ref_desc *rdesc;
          try_decrement_rdesc_refcount (jf);
          if (jf->type == IPA_JF_CONST
              && (rdesc = ipa_get_jf_constant_rdesc (jf))
              && rdesc->cs == cs)
            rdesc->cs = NULL;
        }
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}

/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
                           void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

          if (!src_rdesc)
            dst_jf->value.constant.rdesc = NULL;
          else if (src->caller == dst->caller)
            {
              struct ipa_ref *ref;
              symtab_node *n = cgraph_node_for_jfunc (src_jf);
              gcc_checking_assert (n);
              ref = src->caller->find_reference (n, src->call_stmt,
                                                 src->lto_stmt_uid);
              gcc_checking_assert (ref);
              dst->caller->clone_reference (ref, ref->stmt);

              struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = NULL;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else if (src_rdesc->cs == src)
            {
              struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
              src_rdesc->next_duplicate = dst_rdesc;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              /* This can happen during inlining, when a JFUNC can refer to a
                 reference taken in a function up in the tree of inline clones.
                 We need to find the duplicate that refers to our tree of
                 inline clones.  */

              gcc_assert (dst->caller->global.inlined_to);
              for (dst_rdesc = src_rdesc->next_duplicate;
                   dst_rdesc;
                   dst_rdesc = dst_rdesc->next_duplicate)
                {
                  struct cgraph_node *top;
                  top = dst_rdesc->cs->caller->global.inlined_to
                    ? dst_rdesc->cs->caller->global.inlined_to
                    : dst_rdesc->cs->caller;
                  if (dst->caller->global.inlined_to == top)
                    break;
                }
              gcc_assert (dst_rdesc);
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
               && src->caller == dst->caller)
        {
          struct cgraph_node *inline_root = dst->caller->global.inlined_to
            ? dst->caller->global.inlined_to : dst->caller;
          struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
          int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

          int c = ipa_get_controlled_uses (root_info, idx);
          if (c != IPA_UNDESCRIBED_USE)
            {
              c++;
              ipa_set_controlled_uses (root_info, idx, c);
            }
        }
    }
}

/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}

/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
                             ipa_node_params *old_info,
                             ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
        {
          struct ipa_agg_replacement_value *v;

          v = ggc_alloc<ipa_agg_replacement_value> ();
          memcpy (v, old_av, sizeof (*v));
          v->next = new_av;
          new_av = v;
          old_av = old_av->next;
        }
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans
    = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
        = ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
        {
          vec_safe_reserve_exact (dst_vr, src_vr->length ());
          for (unsigned i = 0; i < src_vr->length (); ++i)
            dst_vr->quick_push ((*src_vr)[i]);
        }
    }

  if (src_trans && vec_safe_length (src_trans->bits) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_bits, va_gc> *src_bits = src_trans->bits;
      vec<ipa_bits, va_gc> *&dst_bits
        = ipcp_get_transformation_summary (dst)->bits;
      vec_safe_reserve_exact (dst_bits, src_bits->length ());
      for (unsigned i = 0; i < src_bits->length (); ++i)
        dst_bits->quick_push ((*src_bits)[i]);
    }
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print ipa_tree_map data structures of the function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
           node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}

/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
                        == void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
        gcc_assert (oparms.length () + 1 == otypes.length ());
      else
        gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
        parm = NULL;
      else
        parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
        {
          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
                                       new_arg_types);
          *link = parm;
          link = &DECL_CHAIN (parm);
        }
      else if (adj->op != IPA_PARM_OP_REMOVE)
        {
          tree new_parm;
          tree ptype;

          if (adj->by_ref)
            ptype = build_pointer_type (adj->type);
          else
            {
              ptype = adj->type;
              if (is_gimple_reg_type (ptype))
                {
                  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
                  if (TYPE_ALIGN (ptype) != malign)
                    ptype = build_aligned_type (ptype, malign);
                }
            }

          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

          new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
                                 ptype);
          const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
          DECL_NAME (new_parm) = create_tmp_var_name (prefix);
          DECL_ARTIFICIAL (new_parm) = 1;
          DECL_ARG_TYPE (new_parm) = ptype;
          DECL_CONTEXT (new_parm) = fndecl;
          TREE_USED (new_parm) = 1;
          DECL_IGNORED_P (new_parm) = 1;
          layout_decl (new_parm, 0);

          if (adj->op == IPA_PARM_OP_NEW)
            adj->base = NULL;
          else
            adj->base = parm;
          adj->new_decl = new_parm;

          *link = new_parm;
          link = &DECL_CHAIN (new_parm);
        }
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
        {
          if (new_reversed)
            TREE_CHAIN (new_arg_types) = void_list_node;
          else
            new_reversed = void_list_node;
        }
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
          && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
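
/* Editorial illustration (not from the original sources; the parameter name
   below is hypothetical): for a function

     void f (int a, struct big *b);

   an adjustment vector containing a COPY entry with base_index 0 and a
   non-copy entry with base_index 1, a scalar type and a nonzero offset
   would rewrite the declaration to something like

     void f (int a, int SYNTH.0);

   where SYNTH.0 is a synthesized PARM_DECL holding the selected piece of
   *b ("SYNTH" is the prefix used when no arg_prefix is given).  */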
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* When we create a new parameter out of the value of the old one,
	     we can do the following kinds of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by the value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = least_bit_hwi (misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple *tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple *def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      SET_DECL_MODE (ddecl, DECL_MODE (origin));

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of
   the current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    {
      src = build_simple_mem_ref (cand->new_decl);
      REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
    }
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
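/* As a hypothetical example: for an access p->f, where p is the default def
   of a PARM_DECL and the field f lies at bit offset 32,
   get_ref_base_and_extent returns the MEM_REF as base with offset 32, the
   MEM_REF handling above folds the reference's own offset in and strips it
   down to the parameter, and the loop then searches for an adjustment with
   the same base and offset.  A match that is a plain copy or a removal
   means there is nothing to substitute, hence the NULL result in that
   case.  */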
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
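/* Two worked cases (with made-up indices): if INNER reduces parameter 0 to
   the component at offset 32 and OUTER merely copies its single argument,
   the combination keeps base_index 0 with offset 32, taking the offset from
   INNER because OUTER is a plain copy.  If both INNER and OUTER are
   reductions, the offsets add up, which corresponds to selecting a
   component of a component.  */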
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
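/* A dump produced by the above looks like, e.g. (values invented):

     Aggregate replacements: 0[32]=42, 1[0]=7

   i.e. parameter index, offset of the known piece within the aggregate,
   and the replacement constant.  */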
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->bits.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits.known)
    {
      streamer_write_widest_int (ob, jump_func->bits.value);
      streamer_write_widest_int (ob, jump_func->bits.mask);
    }
  bp_pack_value (&bp, jump_func->vr_known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->vr_known)
    {
      streamer_write_enum (ob->main_stream, value_range_type,
			   VR_LAST, jump_func->m_vr.type);
      stream_write_tree (ob, jump_func->m_vr.min, true);
      stream_write_tree (ob, jump_func->m_vr.max, true);
    }
}
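/* The reader below must consume the stream in exactly the order produced
   above: the jump function type, the type-specific payload, the count of
   aggregate items followed by the by_ref bitpack and the items themselves,
   and finally the known-bits and value-range bitpacks.  Any change on one
   side requires a matching change on the other.  */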
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      jump_func->bits.known = true;
      jump_func->bits.value = streamer_read_widest_int (ib);
      jump_func->bits.mask = streamer_read_widest_int (ib);
    }
  else
    jump_func->bits.known = false;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      jump_func->vr_known = true;
      jump_func->m_vr.type = streamer_read_enum (ib,
						 value_range_type,
						 VR_LAST);
      jump_func->m_vr.min = stream_read_tree (ib, data_in);
      jump_func->m_vr.max = stream_read_tree (ib, data_in);
    }
  else
    jump_func->vr_known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
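/* Note the encoding used per edge above: the argument count is streamed
   scaled by two with the presence of polymorphic call contexts in the low
   bit.  For instance (hypothetically), three arguments with contexts are
   written as 7 and three without as 6; the reader recovers the flag as
   count & 1 and the count as count / 2.  */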
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    {
      ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
      (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
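/* The partition is deliberately walked twice: the first pass only counts
   nodes with gimple bodies and analysis results so that the count can
   prefix the section, and the second pass streams the per-node info
   itself.  */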
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_range_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits& bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, bits_jfunc.known, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc.known)
	    {
	      streamer_write_widest_int (ob, bits_jfunc.value);
	      streamer_write_widest_int (ob, bits_jfunc.mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
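/* An explicit zero count is streamed above whenever value range or bits
   information is absent, so the reader can unconditionally read both
   counts and simply skip the corresponding sections when they are
   zero.  */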
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_type,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  ipa_bits& bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bits_jfunc.known = bp_unpack_value (&bp, 1);
	  if (bits_jfunc.known)
	    {
	      bits_jfunc.value = streamer_read_widest_int (ib);
	      bits_jfunc.mask = streamer_read_widest_int (ib);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
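/* For instance (made-up indices): with combined_args_to_skip = {1} and
   replacements for indices 0, 2 and 3, the mapping built above is
   {0, -1, 1, 2}, so the surviving replacements are renumbered to 0, 1 and 2
   to match the clone's pruned parameter list.  */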
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      stmt = gsi_stmt (gsi);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
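/* A concrete (invented) example of the rewrite performed above: if the
   aggregate replacement for parameter 0 says the word at bit offset 64
   holds 42, then a load

     x_1 = MEM[(struct S *)p_2(D) + 8B];

   that ipa_load_from_parm_agg matches with index 0, offset 64 and the right
   size becomes x_1 = 42, possibly via NOP_EXPR or VIEW_CONVERT_EXPR when
   the types differ but are convertible.  */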
/* Update bits info of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i].known
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i].mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i].mask, prec, UNSIGNED)
				  | wide_int::from (bits[i].value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i].mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i].value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
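/* Worked example for the pointer branch above (numbers invented): if the
   mask is 0x...f8, i.e. the low three bits are known, and the value is 0x4,
   then tem & -tem yields align 8 and bitpos & (align - 1) yields misalign
   4, recording that the pointer is always 4 modulo 8.  */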
/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}