/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "builtins.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl = node->decl;
  tree fnargs, parm;
  int param_num = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptors array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-nothing jump function carrying no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
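/* For illustration only (this sketch is ours, not part of the original
   sources), the three sections above correspond to a constructor that the
   front end conceptually emits as:

     struct B { virtual void f (); };
     struct D : B { virtual void f (); };

     D::D ()
     {
       B::B (this);                  // section 1: construct ancestors
       this->vptr = &vtable_for_D;   // section 2: store new VMT pointers
       // section 3: user-written constructor body, may call virtuals
     }

   where vtable_for_D stands for whatever VMT representation the target ABI
   uses.  */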
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
946 return the index of the parameter in ipa_node_params which has not been
947 modified. Otherwise return -1. */
950 load_from_unmodified_param (struct func_body_info
*fbi
,
951 vec
<ipa_param_descriptor
> descriptors
,
957 if (!gimple_assign_single_p (stmt
))
960 op1
= gimple_assign_rhs1 (stmt
);
961 if (TREE_CODE (op1
) != PARM_DECL
)
964 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
966 || !parm_preserved_before_stmt_p (fbi
, index
, stmt
, op1
))
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */
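/* For illustration only (example ours, not from the original sources): in

     int f (struct S *p)
     {
       return p->x;
     }

   the load p->x satisfies this predicate if *p cannot have been modified
   between the entry of f and the load; *INDEX_P is then set to 0, *OFFSET_P
   to the bit offset of x within struct S and *BY_REF to true.  */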
static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	 }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */
static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
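/* For illustration only (sketch ours, not from the original sources): the
   shape being matched is the usual representation of a C++ pointer to
   member function, roughly

     struct
     {
       void (T::*__pfn) ();   // pointer to method, the METHOD_PTR field
       ptrdiff_t __delta;     // this-pointer adjustment, the DELTA field
     };

   modulo the exact field types the front end uses.  */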
static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
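/* For illustration only (example ours, not from the original sources): given

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   this function records the constants 1 and 2 together with their offsets
   within s in the jump function of the first argument of foo, provided no
   intervening statement may clobber s.  */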
static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
     f$__delta_5 = f.__delta;
     f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
     f$__delta_5 = MEM[(struct  *)&f];
     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
     D.2496_3 = (int) f$__pfn_24;
     D.2497_4 = D.2496_3 & 1;
     if (D.2497_4 != 0)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 6>:
     D.2500_7 = (unsigned int) f$__delta_5;
     D.2501_8 = &S + D.2500_7;
     D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
     D.2503_10 = *D.2502_9;
     D.2504_12 = f$__pfn_24 + -1;
     D.2505_13 = (unsigned int) D.2504_12;
     D.2506_14 = D.2503_10 + D.2505_13;
     D.2507_15 = *D.2506_14;
     iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
     # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
     D.2500_19 = (unsigned int) f$__delta_5;
     D.2508_20 = &S + D.2500_19;
     D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2135 object referenced in the expression is a formal parameter of the caller
2136 FBI->node (described by FBI->info), create a call note for the
2140 ipa_analyze_virtual_call_uses (struct func_body_info
*fbi
,
2141 gcall
*call
, tree target
)
2143 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2145 HOST_WIDE_INT anc_offset
;
2147 if (!flag_devirtualize
)
2150 if (TREE_CODE (obj
) != SSA_NAME
)
2153 struct ipa_node_params
*info
= fbi
->info
;
2154 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2156 struct ipa_jump_func jfunc
;
2157 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2161 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2162 gcc_assert (index
>= 0);
2163 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
2169 struct ipa_jump_func jfunc
;
2170 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
2173 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2176 index
= ipa_get_param_decl_index (info
,
2177 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2178 gcc_assert (index
>= 0);
2179 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2180 call
, &jfunc
, anc_offset
))
2184 struct cgraph_edge
*cs
= ipa_note_param_call (fbi
->node
, index
, call
);
2185 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
2186 ii
->offset
= anc_offset
;
2187 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2188 ii
->otr_type
= obj_type_ref_class (target
);
2189 ii
->polymorphic
= 1;
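/* A hedged illustration (not present in the original source): a C++ call
   like obj->foo () on a polymorphic OBJ reaches this function as something
   of the shape

       OBJ_TYPE_REF (_5; obj_2(D)->0) (obj_2(D));

   where the token (here 0) indexes the virtual table and OBJ is the object
   whose vtable pointer is loaded.  If OBJ turns out to be a formal
   parameter (possibly after walking through ancestor references), the call
   note created above lets IPA-CP devirtualize the call once the
   parameter's dynamic type becomes known.  */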
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
          && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
                                            target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
                           == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
                           == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
        = !context.get_dynamic_type (instance,
                                     OBJ_TYPE_REF_OBJECT (target),
                                     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis,
                                     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis,
                                   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
         the flag during modification analysis.  */
      if (is_gimple_reg (parm))
        {
          tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
                                       parm);
          if (ddef && !has_zero_uses (ddef))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              ipa_set_param_used (info, i, true);
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
                    if (!is_gimple_debug (USE_STMT (use_p)))
                      {
                        controlled_uses = IPA_UNDESCRIBED_USE;
                        break;
                      }
                  }
                else
                  controlled_uses++;
            }
          else
            controlled_uses = 0;
        }
      else
        controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
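/* A small example of the controlled-uses idea (illustrative only, not from
   the original source): in

       static void bar (int *p) { use (*p); }
       void foo (int *q) { bar (q); bar (q); }

   the parameter Q of FOO has two controlled uses, because every use of its
   default definition is an argument of a call described by a jump
   function.  If Q also escaped into memory or was used in arithmetic, the
   walk above would record IPA_UNDESCRIBED_USE instead, blocking
   transformations that rely on knowing all uses.  */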
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
        {
          ipa_set_param_used (info, i, true);
          ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
        }
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
        = ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
          struct ipa_polymorphic_call_context *src_ctx
            = ipa_get_ith_polymorhic_call_context (top, dst_fid);

          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
              ipa_set_jf_unknown (dst);
              continue;
            }

          src = ipa_get_ith_jump_func (top, dst_fid);

          if (src_ctx && !src_ctx->useless_p ())
            {
              struct ipa_polymorphic_call_context ctx = *src_ctx;

              /* TODO: Make type preserved safe WRT contexts.  */
              if (!ipa_get_jf_ancestor_type_preserved (dst))
                ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
              ctx.offset_by (dst->value.ancestor.offset);
              if (!ctx.useless_p ())
                {
                  if (!dst_ctx)
                    {
                      vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                             count);
                      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                    }

                  dst_ctx->combine_with (ctx);
                }
            }

          if (src->agg.items
              && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
            {
              struct ipa_agg_jf_item *item;
              int j;

              /* Currently we do not produce clobber aggregate jump functions,
                 replace with merging when we do.  */
              gcc_assert (!dst->agg.items);

              dst->agg.items = vec_safe_copy (src->agg.items);
              dst->agg.by_ref = src->agg.by_ref;
              FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
                item->offset -= dst->value.ancestor.offset;
            }

          if (src->type == IPA_JF_PASS_THROUGH
              && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
              dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
            }
          else
            ipa_set_jf_unknown (dst);
        }
      else if (dst->type == IPA_JF_PASS_THROUGH)
        {
          struct ipa_jump_func *src;
          /* We must check range due to calls with variable number of arguments
             and we cannot combine jump functions with operations.  */
          if (dst->value.pass_through.operation == NOP_EXPR
              && (dst->value.pass_through.formal_id
                  < ipa_get_cs_argument_count (top)))
            {
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
              struct ipa_polymorphic_call_context *src_ctx
                = ipa_get_ith_polymorhic_call_context (top, dst_fid);

              if (src_ctx && !src_ctx->useless_p ())
                {
                  struct ipa_polymorphic_call_context ctx = *src_ctx;

                  /* TODO: Make type preserved safe WRT contexts.  */
                  if (!ipa_get_jf_pass_through_type_preserved (dst))
                    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
                  if (!ctx.useless_p ())
                    {
                      if (!dst_ctx)
                        {
                          vec_safe_grow_cleared (args->polymorphic_call_contexts,
                                                 count);
                          dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
                        }
                      dst_ctx->combine_with (ctx);
                    }
                }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
                  ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  ipa_set_jf_cst_copy (dst, src);
                  break;

                case IPA_JF_PASS_THROUGH:
                  {
                    int formal_id = ipa_get_jf_pass_through_formal_id (src);
                    enum tree_code operation;
                    operation = ipa_get_jf_pass_through_operation (src);

                    if (operation == NOP_EXPR)
                      {
                        bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
                        ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else
                      {
                        tree operand = ipa_get_jf_pass_through_operand (src);
                        ipa_set_jf_arith_pass_through (dst, formal_id, operand,
                                                       operation);
                      }
                    break;
                  }
                case IPA_JF_ANCESTOR:
                  {
                    bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
                                         ipa_get_jf_ancestor_formal_id (src),
                                         agg_p);
                    break;
                  }
                default:
                  gcc_unreachable ();
                }

              if (src->agg.items
                  && (dst_agg_p || !src->agg.by_ref))
                {
                  /* Currently we do not produce clobber aggregate jump
                     functions, replace with merging when we do.  */
                  gcc_assert (!dst->agg.items);

                  dst->agg.by_ref = src->agg.by_ref;
                  dst->agg.items = vec_safe_copy (src->agg.items);
                }
            }
          else
            ipa_set_jf_unknown (dst);
        }
    }
}
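/* An illustrative composition (not part of the original source): suppose
   CS passes the caller's parameter A straight through as argument 0, and E
   (already inlined into CS->callee) describes its argument as an ancestor
   jump function at offset 32 based on formal 0.  After inlining, E's jump
   function must be expressed in terms of the new root's parameters, so the
   loop above rewrites it to an ancestor function whose formal_id comes
   from the pass-through and whose offset is unchanged, with agg_preserved
   valid only if both component functions preserved the aggregate.  */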
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
                                bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
          if (ie->indirect_info->member_ptr)
            /* Member pointer call that goes through a VMT lookup.  */
            return NULL;

          if (dump_enabled_p ())
            {
              location_t loc = gimple_location_safe (ie->call_stmt);
              dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                               "discovered direct call to non-function in %s/%i, "
                               "making it __builtin_unreachable\n",
                               ie->caller->name (), ie->caller->order);
            }

          target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
          callee = cgraph_node::get_create (target);
          unreachable = true;
        }
      else
        callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We had better ensure we can refer to it.
         In the case of static functions we are out of luck, since we already
         removed its body.  In the case of public functions we may or may
         not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
          || !TREE_PUBLIC (target))
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (ie->callee->name ()),
                     ie->callee->order);
          return NULL;
        }
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
          != callee->ultimate_alias_target ())
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s/%i -> %s/%i) but the call is already "
                     "speculated to %s/%i.  Giving up.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (callee->name ()),
                     callee->order,
                     xstrdup_for_dump (e2->callee->name ()),
                     e2->callee->order);
        }
      else
        {
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
                     "target (%s/%i -> %s/%i); this agrees with previous "
                     "speculation.\n",
                     xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
                     xstrdup_for_dump (callee->name ()),
                     callee->order);
        }
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s/%i -> %s/%i), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
               speculative ? "speculative" : "known",
               xstrdup_for_dump (ie->caller->name ()),
               ie->caller->order,
               xstrdup_for_dump (callee->name ()),
               callee->order);
      if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
     }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                       "converting indirect call in %s to direct call to %s\n",
                       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
         for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
        {
          es = inline_edge_summary (ie);
          es->call_stmt_size -= (eni_size_weights.indirect_call_cost
                                 - eni_size_weights.call_cost);
          es->call_stmt_time -= (eni_time_weights.indirect_call_cost
                                 - eni_time_weights.call_cost);
        }
    }
  else
    {
      if (!callee->can_be_discarded_p ())
        {
          cgraph_node *alias;
          alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
          if (alias)
            callee = alias;
        }
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
             (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is none.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
                            HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
        /* Currently we do not have clobber values, return NULL for them once
           we do.  */
        gcc_checking_assert (is_gimple_ip_invariant (item->value));
        return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
                                           origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
             xstrdup_for_dump (origin->caller->name ()),
             origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
        return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
                                  struct ipa_jump_func *jfunc,
                                  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
                                         ie->indirect_info->offset,
                                         ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
                           && (cs != ie
                               || jfunc->type != IPA_JF_CONST
                               || !cgraph_node_for_jfunc (jfunc)
                               || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
        fprintf (dump_file,
                 "Type inconsistent devirtualization: %s/%i->%s\n",
                 ie->caller->name (), ie->caller->order,
                 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
        fprintf (dump_file,
                 "No devirtualization target in %s/%i\n",
                 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
                                   struct ipa_jump_func *jfunc,
                                   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
                                           ie->indirect_info->offset,
                                           true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
        {
          t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
                                                 vtable, offset);
          if (t)
            {
              if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
                   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
                  || !possible_polymorphic_call_target_p
                       (ie, cgraph_node::get (t)))
                {
                  /* Do not speculate builtin_unreachable, it is stupid!  */
                  if (!ie->indirect_info->vptr_changed)
                    target = ipa_impossible_devirt_target (ie, target);
                }
              else
                {
                  target = t;
                  speculative = ie->indirect_info->vptr_changed;
                }
            }
        }
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
                                      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
        target = targets[0]->decl;
      else
        target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
           && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
                                            ie->indirect_info->otr_token,
                                            ie->indirect_info->context);
      if (n)
        {
          target = n->decl;
          speculative = true;
        }
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
          (ie, cgraph_node::get_create (target)))
        {
          if (speculative)
            return NULL;
          target = ipa_impossible_devirt_target (ie, target);
        }
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_node *node,
                                      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
                                ? cs->caller->global.inlined_to
                                : cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
        continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
        {
          ici->param_index = -1;
          continue;
        }

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
        new_direct_edge = NULL;
      else if (ici->polymorphic)
        {
          ipa_polymorphic_call_context ctx;
          ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
          new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
        }
      else
        new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
                                                            new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          top = IPA_EDGE_REF (cs);
          res = true;
        }
      else if (new_direct_edge)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          if (new_direct_edge->call_stmt)
            new_direct_edge->call_stmt_cannot_inline_p
              = !gimple_check_call_matching_types (
                  new_direct_edge->call_stmt,
                  new_direct_edge->callee->decl, false);
          if (new_edges)
            {
              new_edges->safe_push (new_direct_edge);
              res = true;
            }
          top = IPA_EDGE_REF (cs);
        }
      else if (jfunc->type == IPA_JF_PASS_THROUGH
               && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
        {
          if ((ici->agg_contents
               && !ipa_get_jf_pass_through_agg_preserved (jfunc))
              || (ici->polymorphic
                  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
            ici->param_index = -1;
          else
            ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
        }
      else if (jfunc->type == IPA_JF_ANCESTOR)
        {
          if ((ici->agg_contents
               && !ipa_get_jf_ancestor_agg_preserved (jfunc))
              || (ici->polymorphic
                  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
              ici->offset += ipa_get_jf_ancestor_offset (jfunc);
            }
        }
      else
        /* Either we can find a destination for this edge now or never.  */
        ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
                                   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
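/* Worked example (illustrative): if a parameter has C = 2 described uses in
   the caller and the corresponding argument of the inlined call accounts
   for D = 3 described uses in the callee, then the call argument itself is
   one of the C uses and disappears with inlining, so the combined count is
   C + D - 1 = 4.  An IPA_UNDESCRIBED_USE on either side poisons the
   result, as implemented above.  */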
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
               ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
        {
          int src_idx, c, d;
          src_idx = ipa_get_jf_pass_through_formal_id (jf);
          c = ipa_get_controlled_uses (new_root_info, src_idx);
          d = ipa_get_controlled_uses (old_root_info, i);

          gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
                               == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
          c = combine_controlled_uses_counters (c, d);
          ipa_set_controlled_uses (new_root_info, src_idx, c);
          if (c == 0 && new_root_info->ipcp_orig_node)
            {
              struct cgraph_node *n;
              struct ipa_ref *ref;
              tree t = new_root_info->known_csts[src_idx];

              if (t && TREE_CODE (t) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
                  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
                  && (ref = new_root->find_reference (n, NULL, 0)))
                {
                  if (dump_file)
                    fprintf (dump_file, "ipa-prop: Removing cloning-created "
                             "reference from %s/%i to %s/%i.\n",
                             xstrdup_for_dump (new_root->name ()),
                             new_root->order,
                             xstrdup_for_dump (n->name ()), n->order);
                  ref->remove_reference ();
                }
            }
        }
      else if (jf->type == IPA_JF_CONST
               && (rdesc = jfunc_rdesc_usable (jf)))
        {
          int d = ipa_get_controlled_uses (old_root_info, i);
          int c = rdesc->refcount;
          rdesc->refcount = combine_controlled_uses_counters (c, d);
          if (rdesc->refcount == 0)
            {
              tree cst = ipa_get_jf_constant (jf);
              struct cgraph_node *n;
              gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
                                   && TREE_CODE (TREE_OPERAND (cst, 0))
                                   == FUNCTION_DECL);
              n = cgraph_node::get (TREE_OPERAND (cst, 0));
              if (n)
                {
                  struct cgraph_node *clone;
                  bool ok;
                  ok = remove_described_reference (n, rdesc);
                  gcc_checking_assert (ok);

                  clone = cs->caller;
                  while (clone->global.inlined_to
                         && clone != rdesc->cs->caller
                         && IPA_NODE_REF (clone)->ipcp_orig_node)
                    {
                      struct ipa_ref *ref;
                      ref = clone->find_reference (n, NULL, 0);
                      if (ref)
                        {
                          if (dump_file)
                            fprintf (dump_file, "ipa-prop: Removing "
                                     "cloning-created reference "
                                     "from %s/%i to %s/%i.\n",
                                     xstrdup_for_dump (clone->name ()),
                                     clone->order,
                                     xstrdup_for_dump (n->name ()),
                                     n->order);
                          ref->remove_reference ();
                        }
                      clone = clone->callers->caller;
                    }
                }
            }
        }
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
          if (rdesc)
            rdesc->refcount = IPA_UNDESCRIBED_USE;
        }
      else if (jf->type == IPA_JF_PASS_THROUGH)
        ipa_set_controlled_uses (new_root_info,
                                 jf->value.pass_through.formal_id,
                                 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
                                   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
        {
          struct ipa_cst_ref_desc *rdesc;
          try_decrement_rdesc_refcount (jf);
          if (jf->type == IPA_JF_CONST
              && (rdesc = ipa_get_jf_constant_rdesc (jf))
              && rdesc->cs == cs)
            rdesc->cs = NULL;
        }
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
                           void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
        {
          struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

          if (!src_rdesc)
            dst_jf->value.constant.rdesc = NULL;
          else if (src->caller == dst->caller)
            {
              struct ipa_ref *ref;
              symtab_node *n = cgraph_node_for_jfunc (src_jf);
              gcc_checking_assert (n);
              ref = src->caller->find_reference (n, src->call_stmt,
                                                 src->lto_stmt_uid);
              gcc_checking_assert (ref);
              dst->caller->clone_reference (ref, ref->stmt);

              gcc_checking_assert (ipa_refdesc_pool);
              struct ipa_cst_ref_desc *dst_rdesc
                = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = NULL;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else if (src_rdesc->cs == src)
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              gcc_checking_assert (ipa_refdesc_pool);
              dst_rdesc
                = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
              src_rdesc->next_duplicate = dst_rdesc;
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
          else
            {
              struct ipa_cst_ref_desc *dst_rdesc;
              /* This can happen during inlining, when a JFUNC can refer to a
                 reference taken in a function up in the tree of inline clones.
                 We need to find the duplicate that refers to our tree of
                 inline clones.  */

              gcc_assert (dst->caller->global.inlined_to);
              for (dst_rdesc = src_rdesc->next_duplicate;
                   dst_rdesc;
                   dst_rdesc = dst_rdesc->next_duplicate)
                {
                  struct cgraph_node *top;
                  top = dst_rdesc->cs->caller->global.inlined_to
                    ? dst_rdesc->cs->caller->global.inlined_to
                    : dst_rdesc->cs->caller;
                  if (dst->caller->global.inlined_to == top)
                    break;
                }
              gcc_assert (dst_rdesc);
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
               && src->caller == dst->caller)
        {
          struct cgraph_node *inline_root = dst->caller->global.inlined_to
            ? dst->caller->global.inlined_to : dst->caller;
          struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
          int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

          int c = ipa_get_controlled_uses (root_info, idx);
          if (c != IPA_UNDESCRIBED_USE)
            {
              c++;
              ipa_set_controlled_uses (root_info, idx, c);
            }
        }
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
                             ipa_node_params *old_info,
                             ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
        {
          struct ipa_agg_replacement_value *v;

          v = ggc_alloc<ipa_agg_replacement_value> ();
          memcpy (v, old_av, sizeof (*v));
          v->next = new_av;
          new_av = v;
          old_av = old_av->next;
        }
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
        = ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
        dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_cst_values_pool);
      free_alloc_pool (ipcp_poly_ctx_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
        free_alloc_pool (ipa_refdesc_pool);
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_cst_values_pool)
    free_alloc_pool (ipcp_cst_values_pool);
  if (ipcp_poly_ctx_values_pool)
    free_alloc_pool (ipcp_poly_ctx_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, " function %s/%i parameter descriptors:\n",
           node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
        fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
        fprintf (f, " undescribed_use");
      else
        fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
                        == void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
        gcc_assert (oparms.length () + 1 == otypes.length ());
      else
        gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
        parm = NULL;
      else
        parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
        {
          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
                                       new_arg_types);
          *link = parm;
          link = &DECL_CHAIN (parm);
        }
      else if (adj->op != IPA_PARM_OP_REMOVE)
        {
          tree new_parm;
          tree ptype;

          if (adj->by_ref)
            ptype = build_pointer_type (adj->type);
          else
            {
              ptype = adj->type;
              if (is_gimple_reg_type (ptype))
                {
                  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
                  if (TYPE_ALIGN (ptype) < malign)
                    ptype = build_aligned_type (ptype, malign);
                }
            }

          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

          new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
                                 ptype);
          const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
          DECL_NAME (new_parm) = create_tmp_var_name (prefix);
          DECL_ARTIFICIAL (new_parm) = 1;
          DECL_ARG_TYPE (new_parm) = ptype;
          DECL_CONTEXT (new_parm) = fndecl;
          TREE_USED (new_parm) = 1;
          DECL_IGNORED_P (new_parm) = 1;
          layout_decl (new_parm, 0);

          if (adj->op == IPA_PARM_OP_NEW)
            adj->base = NULL;
          else
            adj->base = parm;
          adj->new_decl = new_parm;

          *link = new_parm;
          link = &DECL_CHAIN (new_parm);
        }
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
        {
          if (new_reversed)
            TREE_CHAIN (new_arg_types) = void_list_node;
          else
            new_reversed = void_list_node;
        }
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     The exception is METHOD_TYPEs, which must have a THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
          && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
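/* Illustrative effect (not part of the original source): given

       void foo (int a, struct big *b);

   an adjustment vector that copies parameter 0 and replaces parameter 1
   with a scalar piece of *B passed by value rewrites the declaration to
   roughly

       void foo (int a, int SYNTH.3);

   with the new PARM_DECL marked artificial and DECL_IGNORED_P, and the
   builtin/virtual information cleared as done above, since the signature
   no longer matches the original.  */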
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
                           ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
        {
          tree arg = gimple_call_arg (stmt, adj->base_index);

          vargs.quick_push (arg);
        }
      else if (adj->op != IPA_PARM_OP_REMOVE)
        {
          tree expr, base, off;
          location_t loc;
          unsigned int deref_align = 0;
          bool deref_base = false;

          /* We create a new parameter out of the value of the old one, we can
             do the following kind of transformations:

             - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

             - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by value of adj->by_ref.  Moreover, the code below
               handles both situations when the original aggregate is passed by
               value (its type is not a pointer) and when it is passed by
               reference (it is a pointer to an aggregate).

             When the new argument is passed by reference (adj->by_ref is true)
             it must be a part of an aggregate and therefore we form it by
             simply taking the address of a reference inside the original
             aggregate.  */

          gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
          base = gimple_call_arg (stmt, adj->base_index);
          loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
                              : EXPR_LOCATION (base);

          if (TREE_CODE (base) != ADDR_EXPR
              && POINTER_TYPE_P (TREE_TYPE (base)))
            off = build_int_cst (adj->alias_ptr_type,
                                 adj->offset / BITS_PER_UNIT);
          else
            {
              HOST_WIDE_INT base_offset;
              tree prev_base;
              bool addrof;

              if (TREE_CODE (base) == ADDR_EXPR)
                {
                  base = TREE_OPERAND (base, 0);
                  addrof = true;
                }
              else
                addrof = false;
              prev_base = base;
              base = get_addr_base_and_unit_offset (base, &base_offset);
              /* Aggregate arguments can have non-invariant addresses.  */
              if (!base)
                {
                  base = build_fold_addr_expr (prev_base);
                  off = build_int_cst (adj->alias_ptr_type,
                                       adj->offset / BITS_PER_UNIT);
                }
              else if (TREE_CODE (base) == MEM_REF)
                {
                  if (!addrof)
                    {
                      deref_base = true;
                      deref_align = TYPE_ALIGN (TREE_TYPE (base));
                    }
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
                                         off);
                  base = TREE_OPERAND (base, 0);
                }
              else
                {
                  off = build_int_cst (adj->alias_ptr_type,
                                       base_offset
                                       + adj->offset / BITS_PER_UNIT);
                  base = build_fold_addr_expr (base);
                }
            }

          if (!adj->by_ref)
            {
              tree type = adj->type;
              unsigned int align;
              unsigned HOST_WIDE_INT misalign;

              if (deref_base)
                {
                  align = deref_align;
                  misalign = 0;
                }
              else
                {
                  get_pointer_alignment_1 (base, &align, &misalign);
                  if (TYPE_ALIGN (type) > align)
                    align = TYPE_ALIGN (type);
                }
              misalign += (offset_int::from (off, SIGNED).to_short_addr ()
                           * BITS_PER_UNIT);
              misalign = misalign & (align - 1);
              if (misalign != 0)
                align = (misalign & -misalign);
              if (align < TYPE_ALIGN (type))
                type = build_aligned_type (type, align);
              base = force_gimple_operand_gsi (&gsi, base,
                                               true, NULL, true, GSI_SAME_STMT);
              expr = fold_build2_loc (loc, MEM_REF, type, base, off);
              /* If expr is not a valid gimple call argument emit
                 a load into a temporary.  */
              if (is_gimple_reg_type (TREE_TYPE (expr)))
                {
                  gimple tem = gimple_build_assign (NULL_TREE, expr);
                  if (gimple_in_ssa_p (cfun))
                    {
                      gimple_set_vuse (tem, gimple_vuse (stmt));
                      expr = make_ssa_name (TREE_TYPE (expr), tem);
                    }
                  else
                    expr = create_tmp_reg (TREE_TYPE (expr));
                  gimple_assign_set_lhs (tem, expr);
                  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
                }
            }
          else
            {
              expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
              expr = build_fold_addr_expr (expr);
              expr = force_gimple_operand_gsi (&gsi, expr,
                                               true, NULL, true, GSI_SAME_STMT);
            }
          vargs.quick_push (expr);
        }
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
        {
          unsigned int ix;
          tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
          gimple def_temp;

          arg = gimple_call_arg (stmt, adj->base_index);
          if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
            {
              if (!fold_convertible_p (TREE_TYPE (origin), arg))
                continue;
              arg = fold_convert_loc (gimple_location (stmt),
                                      TREE_TYPE (origin), arg);
            }
          if (debug_args == NULL)
            debug_args = decl_debug_args_insert (callee_decl);
          for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
            if (ddecl == origin)
              {
                ddecl = (**debug_args)[ix + 1];
                break;
              }
          if (ddecl == NULL)
            {
              ddecl = make_node (DEBUG_EXPR_DECL);
              DECL_ARTIFICIAL (ddecl) = 1;
              TREE_TYPE (ddecl) = TREE_TYPE (origin);
              DECL_MODE (ddecl) = DECL_MODE (origin);

              vec_safe_push (*debug_args, origin);
              vec_safe_push (*debug_args, ddecl);
            }
          def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
        {
          gimple_set_vdef (new_stmt, gimple_vdef (stmt));
          SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
                 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
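
/* Editorial walk-through (hypothetical GIMPLE, not from the original code):
   for **EXPR of the form MEM[p_1(D) + 8B].f, where p_1(D) is the default
   definition of a PARM_DECL, get_ref_base_and_extent yields the MEM_REF base
   and a bit offset; the MEM_REF's own offset is folded into OFFSET, the SSA
   name is mapped back to its PARM_DECL by get_ssa_base_param, and the
   resulting (base, offset) pair is looked up in ADJUSTMENTS.  */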

/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}

/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}

/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "    Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
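
/* Editorial summary of the record written above (derived from the writer;
   encodings as used there): the jump function type; a type-specific payload
   (constant tree; pass-through operation, formal_id and, for NOP_EXPR, an
   agg_preserved bit; or ancestor offset, formal_id and agg_preserved bit);
   the number of aggregate items, with a by_ref bit when non-zero; the items
   themselves as (offset, value) pairs; and finally the alignment bitpack.
   ipa_read_jump_function below must consume the fields in exactly this
   order.  */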

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
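
/* Editorial note on the per-edge header above: the argument count and the
   presence of polymorphic call contexts share a single uhwi, written as
   COUNT * 2 + (contexts != NULL).  The reader recovers both as

     int count = streamer_read_uhwi (ib);
     bool contexts_computed = count & 1;
     count /= 2;

   which is exactly what ipa_read_node_info below does.  */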

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)
		  ->stream_in (ib, data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}

/* Stream out the aggregate value replacement chain for NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);
      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}

/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in the given NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
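
/* Editorial worked example (hypothetical): if the clone skips original
   parameter 1 of three, the mapping computed above is adj[0] = 0,
   adj[1] = -1 and adj[2] = 1, so a replacement recorded against original
   parameter 2 is retargeted to index 1 in the clone; replacements against
   skipped parameters are not expected to exist.  */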

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}
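
/* Editorial illustration (hypothetical GIMPLE, not from the original code):
   when the recorded aggregate contents of a parameter say that the word at
   offset 4 of *param holds the constant 7, a load such as

     x_1 = MEM[(int *) param_2(D) + 4B];

   is rewritten by the walker above into

     x_1 = 7;

   after which EH cleanup may remove now-dead landing pads, which is what
   sets *m_cfg_changed.  */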

/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
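
/* Editorial note: the function above only strengthens the alignment
   recorded in the points-to info of the parameter's default-def SSA name;
   no statements are changed here.  A pointer parameter known to be, say,
   16-byte aligned at all call sites can afterwards be accessed by later
   passes (e.g. the vectorizer) with aligned loads and stores.  */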

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}