/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "plugin-api.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "builtins.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */
struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
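
/* For illustration (hypothetical function, not from this file): in

     void f (int x, int *p)
     {
       x = 5;      // a write like this sets parm_modified for X
       *p = 5;     // a write like this sets ref_modified for P
     }

   PARM_MODIFIED tracks writes to the parameter itself, REF_MODIFIED tracks
   writes to the memory a pointer parameter points to, and PT_MODIFIED is
   the same as REF but computed with a maximally inclusive alias
   reference.  */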
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */
static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function that carries no information
   (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another constant jump function (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
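
/* A minimal illustration (hypothetical caller, not from this file):

     void f (int a)
     {
       g (a);
     }

   The argument of the call to G is parameter 0 of F passed unchanged, so
   it is described by a simple pass-through jump function with formal_id 0
   and operation NOP_EXPR.  */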
/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
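
/* A minimal illustration (hypothetical types, not from this file):

     struct A { int i; };
     struct B { int j; struct A a; };
     void f (struct B *b)
     {
       g (&b->a);
     }

   The argument of the call to G is parameter 0 of F plus the offset of
   field A within struct B, which is exactly what an ancestor jump
   function with that OFFSET and FORMAL_ID records.  */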
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
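
/* A schematic C++ sketch (not from this file) of the three constructor
   phases the comment above relies on:

     struct B { virtual void f (); };
     struct D : B
     {
       D () : B ()   // 1) ancestor constructors run first
       {             // 2) then the VMT pointers are set to D's vtables
         f ();       // 3) only now may user code call virtual methods
       }
     };
*/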
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
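
/* A hypothetical case (assuming the constructor gets inlined) in which
   the walk above detects a change:

     void f (struct A *a)
     {
       new (a) B;   // inlined ctor stores a new VMT pointer into *A
       a->foo ();   // the dynamic type may differ at this call
     }

   The VMT store is flagged by check_stmt_for_type_change and the jump
   function is degraded to unknown.  */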
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
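
/* The budget checked above comes from the ipa-max-aa-steps parameter;
   every walk_aliased_vdefs call below adds the number of visited virtual
   definitions to aa_walked, so once the budget is exhausted, all
   subsequent queries conservatively behave as if the memory was
   modified.  */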
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
867 structures and/or intialize the result with a dominating description as
870 static struct param_aa_status
*
871 parm_bb_aa_status_for_bb (struct func_body_info
*fbi
, basic_block bb
,
874 gcc_checking_assert (fbi
);
875 struct ipa_bb_info
*bi
= ipa_get_bb_info (fbi
, bb
);
876 if (bi
->param_aa_statuses
.is_empty ())
877 bi
->param_aa_statuses
.safe_grow_cleared (fbi
->param_count
);
878 struct param_aa_status
*paa
= &bi
->param_aa_statuses
[index
];
881 gcc_checking_assert (!paa
->parm_modified
882 && !paa
->ref_modified
883 && !paa
->pt_modified
);
884 struct param_aa_status
*dom_paa
;
885 dom_paa
= find_dominating_aa_status (fbi
, bb
, index
);
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
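
/* A minimal illustration (hypothetical gimple, not from this file):

     int f (int a)
     {
       int a.0;
       a.0_1 = a;      <-- STMT
       ...
     }

   Here the function would return the index of A, provided nothing could
   have modified A before STMT.  */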
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
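
/* For illustration (hypothetical caller, not from this file):

     void f (struct S *p)
     {
       ...            // no statements that may clobber *P
       g (p);         <-- CALL
     }

   Here the function returns true: the callee G receives the aggregate
   behind P in the same state in which F itself received it.  */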
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
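
/* A minimal illustration (hypothetical load, not from this file):

     int f (struct S *p)
     {
       return p->x;   // STMT, with OP being p->x
     }

   On success *INDEX_P is set to the index of P, *OFFSET_P to the bit
   offset of field X within struct S and *BY_REF_P to true.  */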
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);

   INFO is the structure describing individual parameters' access at different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
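
/* For illustration: with the common representation of C++ pointers to
   member functions, the predicate above matches a record equivalent to

     struct
     {
       void (*__pfn) ();   // method pointer or adjusted vtable index
       ptrdiff_t __delta;  // this-pointer adjustment
     };

   (Field names are illustrative; only the shape is checked.)  */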
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
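
/* A minimal illustration (hypothetical caller, not from this file):

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   The backward walk above records the constants 1 and 2 together with
   the offsets of fields A and B, and the third stage turns them into
   the ipa_agg_jf_item entries of JFUNC.  */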
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we can not use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
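
/* Illustration (a hypothetical example, not taken from the original sources;
   names are invented): the OBJ_TYPE_REF targets handled above arise from
   virtual calls whose object is a formal parameter, e.g.

     struct A { virtual int foo (); };

     int
     call_foo (A *a)
     {
       return a->foo ();   // OBJ_TYPE_REF with object `a', token 0
     }

   Here `a' is the default-definition SSA name of a PARM_DECL, so the first
   branch above applies with anc_offset zero.  When the object pointer is
   instead derived from a parameter by adjusting it to a base subobject, the
   second branch recovers the parameter and the ancestor offset via
   get_ancestor_addr_info.  */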
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
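
/* Illustration (a hypothetical example, not taken from the original sources):
   for

     static int g (int *p);

     int
     f (int *p)
     {
       return g (p);
     }

   the default definition of `p' has exactly one use and that use is a call
   argument, so the loop above records one controlled use for parameter 0.
   Storing `p' to memory or using it in arithmetic would instead make the
   count IPA_UNDESCRIBED_USE.  */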
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
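
/* Illustration (hypothetical numbers, not taken from the original sources):
   composing jump functions as done above is mostly arithmetic on formal ids
   and offsets.  If E describes an argument as "ancestor of caller parameter
   2 at offset 32" and the inlined edge CS in turn passes its parameter 0 as
   argument 2 through another ancestor function with offset 16, the combined
   function becomes "ancestor of parameter 0 at offset 48" -- formal ids are
   substituted, ancestor offsets add up, and the agg_preserved flags are
   and-ed together.  */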
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) this agrees with previous speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
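
/* Illustration (a hypothetical example, not taken from the original sources;
   names are invented): a typical use of the lookup above is resolving a
   call made through a structure member.  For a caller doing

     struct S { int (*cb) (void); };
     ...
     s.cb = some_fn;
     bar (&s);

   the aggregate jump function of the argument to bar contains an item
   { offset of cb in bits, &some_fn }, and ipa_find_agg_cst_for_param
   retrieves &some_fn when queried with that offset and a BY_REF value that
   matches how the aggregate is passed.  */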
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
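
/* Illustration (hypothetical numbers, not taken from the original sources):
   the formula is c + d - 1 because one of the C uses in the caller was the
   very call argument through which the value flowed into the callee, and
   that use disappears when the call is inlined.  Combining c = 2 caller
   uses with d = 3 callee uses therefore yields 2 + 3 - 1 = 4 controlled
   uses; if either operand is IPA_UNDESCRIBED_USE the result is
   undescribed as well.  */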
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      free_alloc_pool (ipcp_sources_pool);
      free_alloc_pool (ipcp_cst_values_pool);
      free_alloc_pool (ipcp_poly_ctx_values_pool);
      free_alloc_pool (ipcp_agg_lattice_pool);
      ipa_unregister_cgraph_hooks ();
      if (ipa_refdesc_pool)
	free_alloc_pool (ipa_refdesc_pool);
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_cst_values_pool)
    free_alloc_pool (ipcp_cst_values_pool);
  if (ipcp_poly_ctx_values_pool)
    free_alloc_pool (ipcp_poly_ctx_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
/* Print ipa_tree_map data structures of the function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
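
/* Illustration (a hypothetical example, not taken from the original sources):
   for

     int f (int a, double b);

   ipa_get_vector_of_formal_parms returns the two PARM_DECLs for `a' and
   `b', while ipa_get_vector_of_formal_parm_types applied to TREE_TYPE (f)
   walks TYPE_ARG_TYPES and so also pushes the terminating void_type_node,
   yielding { int, double, void }.  This is why ipa_modify_formal_parameters
   below asserts oparms.length () + 1 == otypes.length () when the last
   type in the list is void.  */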
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
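
/* Illustration (a hypothetical sketch, not taken from the original sources;
   exact adjustment fields depend on the producer, e.g. tree-sra.c): for

     int f (int a, struct big *b);

   an adjustment vector whose first entry is a copy of parameter 0
   (op == IPA_PARM_OP_COPY, base_index == 0) and whose second entry is
   neither a copy nor a removal (base_index == 1, a scalar type and an
   offset into *b) rewrites the declaration roughly to

     int f (int a, int SYNTH);

   where SYNTH is the synthesized PARM_DECL built above, holding the piece
   of *b that the function actually uses.  ipa_modify_call_arguments below
   performs the matching rewrite at every call site.  */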
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of the
   current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
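
/* Illustration (a hypothetical example, not taken from the original sources;
   names are invented): if parameter `s' of type `struct S *' was reduced to
   a scalar replacement for the member at offset 0, calling ipa_modify_expr
   on the body expression `s->x' rewrites it to the new scalar parameter (or
   to a MEM_REF of it when the replacement is passed by reference), wrapping
   the result in a VIEW_CONVERT_EXPR when CONVERT is set and the types do
   not match.  */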
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */
ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }
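
/* Illustrative aside (hypothetical types, not the GCC structures): the core
   of the composition above is how the offsets of two stacked adjustments
   combine.  A stand-alone sketch of just that arithmetic:  */

enum sketch_parm_op { SKETCH_OP_COPY, SKETCH_OP_SPLIT };

struct sketch_adj { int base_index; long offset; sketch_parm_op op; };

/* Combine an inner and an outer adjustment that refer to the same original
   parameter: copy composed with copy stays a copy, a copy on either side is
   transparent, and two splits accumulate their offsets.  */
static sketch_adj
sketch_combine_one (const sketch_adj &in, const sketch_adj &out)
{
  sketch_adj r = { in.base_index, 0, SKETCH_OP_SPLIT };
  if (in.op == SKETCH_OP_COPY && out.op == SKETCH_OP_COPY)
    r.op = SKETCH_OP_COPY;
  else if (in.op == SKETCH_OP_COPY)
    r.offset = out.offset;	/* Only the outer split contributes.  */
  else if (out.op == SKETCH_OP_COPY)
    r.offset = in.offset;	/* Only the inner split contributes.  */
  else
    r.offset = in.offset + out.offset;	/* Both split: offsets add up.  */
  return r;
}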
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
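
/* Illustrative aside (stand-alone sketch, not the LTO streamer API): the
   writer above emits a type tag first and then a per-type payload, so the
   reader can dispatch on the tag before touching the payload.  A toy version
   of that record layout, with hypothetical names:  */

#include <vector>

struct sketch_stream { std::vector<unsigned long> words; };

static void
sketch_write_uhwi (sketch_stream *s, unsigned long v)
{
  s->words.push_back (v);
}

/* Emit a toy pass-through record: tag, then the payload fields.  GCC packs
   the single-bit flag through a bitpack rather than a full word.  */
static void
sketch_write_pass_through (sketch_stream *s, unsigned long tag,
			   unsigned long formal_id, bool agg_preserved)
{
  sketch_write_uhwi (s, tag);
  sketch_write_uhwi (s, formal_id);
  sketch_write_uhwi (s, agg_preserved ? 1 : 0);
}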
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
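
/* Illustrative aside (stand-alone sketch): the pair of functions above
   relies on the bitpack discipline that single-bit flags are packed into a
   word in one order and unpacked in exactly the same order.  A toy
   equivalent:  */

struct sketch_bitpack
{
  unsigned long word;
  unsigned pos;
};

static void
sketch_bp_pack (sketch_bitpack *bp, bool v)
{
  bp->word |= (unsigned long) v << bp->pos++;
}

static bool
sketch_bp_unpack (sketch_bitpack *bp)
{
  return (bp->word >> bp->pos++) & 1;
}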
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								       data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
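
/* Illustrative aside: the per-edge header streamed above folds two values
   into one integer, argument-count * 2 plus a bit saying whether polymorphic
   call contexts follow.  The read side decodes it as sketched here:  */

static void
sketch_decode_edge_header (unsigned header, unsigned *nargs,
			   bool *have_contexts)
{
  *have_contexts = (header & 1) != 0;	/* Low bit: contexts streamed too.  */
  *nargs = header / 2;			/* Remaining bits: argument count.  */
}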
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
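
/* Illustrative aside: an LTO function section is laid out as a header
   followed by the CFG part, the main stream and the string table, so the
   offsets computed above are running sums of the sizes recorded in the
   header.  A stand-alone sketch with a hypothetical header type:  */

struct sketch_section_header
{
  int cfg_size;
  int main_size;
  int string_size;
};

static void
sketch_section_offsets (const sketch_section_header *h,
			int *main_off, int *string_off)
{
  int cfg_off = (int) sizeof (sketch_section_header); /* Header comes first. */
  *main_off = cfg_off + h->cfg_size;	  /* Main stream follows the CFG.  */
  *string_off = *main_off + h->main_size; /* Strings close the section.  */
}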
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get a mismatch in argument counts.  Decl
   merging might also have rendered parameter lists obsolete.  This function
   also computes called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Write the aggregate replacement chain and the parameter alignment
   information for NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);
      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read a replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
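
/* Illustrative aside: the remapping above shifts each surviving parameter
   index left by the number of skipped parameters that precede it, marking
   skipped slots with -1.  A stand-alone sketch of the same remap:  */

static void
sketch_build_index_remap (const bool *skipped, int n, int *remap)
{
  int removed = 0;
  for (int i = 0; i < n; i++)
    if (skipped[i])
      {
	remap[i] = -1;		/* Slot disappears in the clone.  */
	removed++;
      }
    else
      remap[i] = i - removed;	/* Shift left past the skipped slots.  */
}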
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, " can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}
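
/* Illustrative aside: the chain lookup above keys each known aggregate value
   on the parameter index plus the bit offset of the load, and additionally
   checks that size and by-reference-ness agree before folding.  A
   stand-alone sketch with hypothetical types:  */

struct sketch_agg_value
{
  int index;
  long offset;
  long size;
  const sketch_agg_value *next;
};

static const sketch_agg_value *
sketch_find_agg_value (const sketch_agg_value *chain,
		       int index, long offset, long size)
{
  for (const sketch_agg_value *v = chain; v; v = v->next)
    if (v->index == index && v->offset == offset && v->size == size)
      return v;		/* The load can be folded to this constant.  */
  return 0;		/* No match: leave the load alone.  */
}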
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
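
/* Illustrative aside: a (align, misalign) pair states that the pointer is
   congruent to misalign modulo align.  When such a pointer is advanced by a
   byte offset, align is preserved and only misalign moves, as sketched
   below.  */

static unsigned
sketch_misalign_after_offset (unsigned align, unsigned misalign,
			      unsigned offset)
{
  /* Assumes align > 0; in GCC it is additionally a power of two.  */
  return (misalign + offset) % align;
}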
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}