/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "builtins.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the edge argument information (jump functions) is actually
   stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump a printable name of the Ith formal parameter of the function
   corresponding to INFO into FILE.  Note there is no setter function as the
   descriptor array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function that is known to carry no useful
   information (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
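
/* For illustration, a hypothetical C++ class whose generated constructor
   follows the three sections described above (all names are invented):

     struct Base { virtual void f (); };
     struct Derived : Base { Derived (); virtual void f (); };

     Derived::Derived ()
     {
       // Section 1: Base::Base() runs first and stores Base's vtable
       // pointer into the object.
       // Section 2: the compiler then stores Derived's vtable pointer.
       // Section 3: only now does user-written constructor code run, and
       // only from here on may virtual calls happen.
     }
*/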
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       D.1845_2->bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
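
/* A hypothetical source-level example of case 2 above: compiling the
   following C, the argument of bar() would get an arithmetic pass-through
   jump function with formal_id 0, operation PLUS_EXPR and operand 4 (the
   names are illustrative only):

     extern void bar (int);

     void
     foo (int a)
     {
       bar (a + 4);
     }
*/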
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
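
/* For illustration, a hypothetical C++ member pointer whose record layout
   matches what type_like_member_ptr_p looks for - a method pointer field
   followed by a delta field (the __pfn/__delta naming follows the Itanium
   C++ ABI convention; this is an assumption, not something checked here):

     struct MyString { int length () const; };
     int (MyString::*fptr) () const;   // roughly { __pfn; __delta; }
*/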
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
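
/* A hypothetical source-level example of what the three stages above can
   recover: for the call in foo() below, the jump function built for the
   argument would record that the bytes at the offsets of fields a and b
   hold the constants 1 and 2 (illustrative only; exact offsets depend on
   the ABI):

     struct S { int a, b; };
     extern void bar (struct S *);

     void
     foo (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       bar (&s);
     }
*/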
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
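
/* A hypothetical example of the alignment bookkeeping above: in the
   following C, get_pointer_alignment_1 can typically determine that the
   argument of bar() is 16-byte aligned with a misalignment of 4 bytes, so
   the jump function would record align 16, misalign 4 (illustrative only;
   actual results depend on target and optimization level):

     extern char buf[32] __attribute__ ((aligned (16)));
     extern void bar (char *);

     void
     foo (void)
     {
       bar (buf + 4);
     }
*/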
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 3>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 4>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
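/* As an illustration of the above, a virtual call made directly on the
   object pointed to by a formal parameter yields a note whose ancestor
   offset is zero, while a call through a base of that parameter yields the
   offset of the base within it; in both cases the token and class are taken
   from the OBJ_TYPE_REF expression.  */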
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
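/* Thus a parameter ends up with N controlled uses only when every use of its
   default definition is either a debug statement or one of N call statements;
   any other kind of use degrades the count to IPA_UNDESCRIBED_USE.  */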
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  vec_safe_grow_cleared (args->polymorphic_call_contexts,
					 count);
		  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id
		= src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) this agrees with previous speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast <cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
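/* Note that OFFSET, like the offsets stored in the items themselves, is
   expressed in bits, so for example a value known to live at byte 8 of the
   aggregate would be looked up with offset 64.  */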
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

static tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
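/* For example, when an indirect call in the inlined callee was based on one
   of the callee's formal parameters and the corresponding jump function is a
   simple pass-through of one of the caller's arguments, the code above simply
   re-points the param_index of the call at the caller's parameter so that
   later propagation can try to resolve it there.  */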
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
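/* E.g. combining counts 2 and 3 yields 5, whereas combining any count with
   IPA_UNDESCRIBED_USE yields IPA_UNDESCRIBED_USE again.  */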
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}

/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}
/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are registered.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_cst_values_pool);
      free_alloc_pool (ipcp_poly_ctx_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_cst_values_pool)
    free_alloc_pool (ipcp_cst_values_pool);
  if (ipcp_poly_ctx_values_pool)
    free_alloc_pool (ipcp_poly_ctx_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures of the function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}
/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
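/* Note that for a prototyped FNTYPE the terminating void of TYPE_ARG_TYPES is
   included in the returned vector, which is why ipa_modify_formal_parameters
   expects one more type than there are PARM_DECLs when the last list element
   is void.  */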
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
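/* For instance, an adjustment vector containing an IPA_PARM_OP_COPY with
   base_index 0 followed by an IPA_PARM_OP_REMOVE with base_index 1 would turn
   a declaration of void f (int a, int b) into one equivalent to
   void f (int a).  */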
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
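/* A typical effect of the above: when only the scalar at byte offset 4 of an
   aggregate pointed to by an argument is actually used, a call foo (ptr) can
   be rewritten into roughly tmp_1 = MEM[(T *)ptr + 4]; foo (tmp_1); (an
   illustrative sketch rather than exact dump output).  */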
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility the
   current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}

/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
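
/* Worked example (editor's note, hypothetical numbers): for **EXPR equal to
   MEM[(struct S *) param_2(D) + 8B].f with field F at byte offset 4,
   get_ref_base_and_extent returns the MEM_REF as BASE with OFFSET 32 bits;
   the MEM_REF branch then folds the +8B displacement in (OFFSET becomes 96
   bits) and strips BASE down to the SSA name, so the PARM_DECL behind
   param_2 can be matched against the adjustments by (base, offset).  */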

/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}

/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
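
/* Worked example (editor's note): if INNER = { COPY p0, REMOVE p1 } and
   OUTER = { COPY of index 0 }, then TMP = { COPY p0 } and the result is
   { COPY p0, REMOVE p1 }: OUTER's indices refer to INNER's surviving
   parameters, and INNER's removals are re-appended at the end, which is
   exactly what applying the two rounds in sequence would have produced.  */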

/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a
   human-friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
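
/* Sample output (editor's illustration of the format above):

     Aggregate replacements: 0[0]=4, 1[32]=&some_var

   i.e. in parameter 0 the piece at bit offset 0 becomes the constant 4 and
   in parameter 1 the piece at bit offset 32 becomes &some_var.  */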

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
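
/* For orientation (editor's note), the record produced and consumed by the
   two functions above is, in order:

     uhwi	jump function type
     ...	type-specific payload; e.g. for IPA_JF_PASS_THROUGH a uhwi
		operation, then either (NOP_EXPR) a uhwi formal_id plus a
		one-bit agg_preserved bitpack, or (arithmetic) the operand
		tree followed by a uhwi formal_id
     uhwi	number of aggregate items, then a one-bit by_ref bitpack if
		nonzero, then (offset, value-tree) pairs
     bitpack	alignment.known, followed by align/misalign uhwis when set

   Writer and reader must stay in lockstep; any format change needs both.  */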

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
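
/* Editor's note: the single bitpack shared by the two functions above holds
   five one-bit flags, packed and unpacked in the same order: polymorphic,
   agg_contents, member_ptr, by_ref, vptr_changed.  A sketch of the idiom:

     bp = bitpack_create (ob->main_stream);   start a fresh bitpack
     bp_pack_value (&bp, flag, 1);	      append one bit per flag
     streamer_write_bitpack (&bp);	      flush to the stream

   The bp_unpack_value calls on the reading side must mirror the exact
   order, or every later field is misinterpreted.  */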

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in
		  (ib, data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
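
/* Editor's note on the encoding used by the two node streaming routines
   above: the per-edge header is a single uhwi equal to

     argument_count * 2 + (polymorphic contexts present ? 1 : 0)

   so the reader recovers both values from one word:

     bool contexts_computed = count & 1;   low bit: contexts present
     count /= 2;			   remaining bits: argument count

   This saves one word per call-graph edge in the LTO stream.  */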

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}

/* Stream out the IPA-CP transformation summary (aggregate replacement
   values and known parameter alignments) for NODE to OB.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}

/* Stream in the aggregate value replacement chain and alignment information
   for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}

/* Write all aggregate replacements for nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
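
/* Worked example (editor's note): with combined_args_to_skip = {1} and
   aggregate replacements on parameters 0 and 2, the scratch map is
   adj = {0, -1, 1}; after the final loop the replacement for parameter 0
   keeps index 0 and the one for parameter 2 is renumbered to 1, matching
   the clone's shortened parameter list.  */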

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}

/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "  But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
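
/* Editor's note: an ALIGN/MISALIGN pair describes a pointer value P with
   P % align == misalign.  E.g. align 16, misalign 4 means P is always 4
   bytes past a 16-byte boundary; set_ptr_info_alignment records exactly
   this pair on the parameter's default-def SSA name, where later passes
   such as vectorization can consume it.  */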

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}