/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
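/* Within this file, PARM_MODIFIED is consulted and updated by
   parm_preserved_before_stmt_p, REF_MODIFIED by parm_ref_data_preserved_p,
   and PT_MODIFIED by parm_ref_data_pass_through_p below.  */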
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  */

static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions", 32);
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
         || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE among the parameter
   descriptors in DESCRIPTORS, or -1 if it is not there.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in the function which
   corresponds to INFO, or -1 if it is not there.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptors array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, "         Context: ");
          ctx->dump (dump_file);
        }

      if (jump_func->alignment.known)
        {
          fprintf (f, "         Alignment: %u, misalignment: %u\n",
                   jump_func->alignment.align,
                   jump_func->alignment.misalign);
        }
      else
        fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup_for_dump (node->name ()), node->order,
               xstrdup_for_dump (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function known to carry no useful information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
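/* Illustrative note (not from the original source): a reference description
   is needed precisely for constants of the form &some_function, because the
   jump function then holds a reference to that function which IPA
   bookkeeping must be able to find and drop again if the jump function is
   discarded, e.g.

     extern void callback (int);
     void caller (void) { register_handler (callback); }

   Here the argument is an ADDR_EXPR of a FUNCTION_DECL, so an
   ipa_cst_ref_desc tied to the call graph edge of the call is allocated;
   register_handler is a made-up name for the example.  */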
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
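/* Illustrative sketch (not from the original source) of the constructor
   shape assumed above, for a hypothetical class D deriving from B:

     D::D () : B ()      // section 1: ancestor constructors run first
     {
       // section 2: the compiler stores the VMT pointers of D and all its
       // ancestor sub-objects, roughly: this->vptr = &vtable_for_D;
       // section 3: member sub-object constructors and user code run, and
       // only here may virtual calls happen
       init ();
     }

   D, B and init are made-up names for the example.  */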
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (inlined_polymorphic_ctor_dtor_block_p (block, false))
              return true;
          return false;
        }
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gcall *call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                          gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
         void (*<T2e4>) (struct S *) D.1867;
         struct S * p.1;

         <bb 2>:
         p.1_1 = p;
         D.1867_2 = p.1_1->f;
         D.1867_2 ();
         gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
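/* As an illustration (not from the original source), the shape being matched
   corresponds to the usual C++ ABI representation of a pointer to member
   function, roughly:

     struct hypothetical_ptrmemfunc_shape
     {
       void (*__pfn) ();   // method pointer, or an adjusted vtable index
       long __delta;       // offset to adjust the this pointer by
     };

   i.e. a RECORD_TYPE with exactly two fields, the first a pointer to a
   METHOD_TYPE and the second an integral delta; the type and field names
   above are made up.  */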
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
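/* For example (illustrative only), if the caller fills an aggregate before
   the call like this:

     struct S s;
     s.a = 1;
     s.b = 2.0;
     consume (&s);

   the backward walk in determine_locally_known_aggregate_parts below would
   build a list of two entries, one per written field, each recording its
   offset, size and constant value; S, its fields and consume are made-up
   names.  */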
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
                                         tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
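/* Note (added for illustration): the second loop above is a fallback for
   declarations whose TYPE_ARG_TYPES is incomplete, e.g. an unprototyped
   K&R-style definition such as

     int hypothetical_f (a) int a; { return a; }

   where the parameter types are only recoverable from DECL_ARGUMENTS of the
   callee; hypothetical_f is a made-up name.  */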
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
                                     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          tree instance;
          struct ipa_polymorphic_call_context context (cs->caller->decl,
                                                       arg, cs->call_stmt,
                                                       &instance);
          context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
          *ipa_get_ith_polymorhic_call_context (args, n) = context;
          if (!context.useless_p ())
            useful_context = true;
        }

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
        {
          unsigned HOST_WIDE_INT hwi_bitpos;
          unsigned align;

          if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
              && align % BITS_PER_UNIT == 0
              && hwi_bitpos % BITS_PER_UNIT == 0)
            {
              jfunc->alignment.known = true;
              jfunc->alignment.align = align / BITS_PER_UNIT;
              jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
            }
          else
            gcc_assert (!jfunc->alignment.known);
        }
      else
        gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
               && TREE_CODE (arg) == PARM_DECL)
        {
          int index = ipa_get_param_decl_index (info, arg);

          gcc_assert (index >= 0);
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
          if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
              ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
      else if (TREE_CODE (arg) == SSA_NAME)
        {
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
                  bool agg_p;
                  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
                  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
                compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
                compute_complex_ancestor_jump_func (fbi, info, jfunc,
                                                    call,
                                                    as_a <gphi *> (stmt));
            }
        }

      /* If ARG is pointer, we can not use its type to determine the type of
         aggregate passed (because type conversions are ignored in gimple).
         Usually we can safely get the type from the function declaration, but
         in case of K&R prototypes or variadic functions we can try our luck
         with the type of the pointer passed.
         TODO: Since we look for actual initialization of the memory object,
         we may better work out the type based on the memory stores we
         find.  */
      if (!param_type)
        param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
           || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
              || POINTER_TYPE_P (param_type)))
        determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
        {
          callee->ultimate_alias_target ();
          /* We do not need to bother analyzing calls to unknown functions
             unless they may become known during lto/whopr.  */
          if (!callee->definition && !flag_lto)
            continue;
        }
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
                                    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
        return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
                     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
                                tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
        ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
        return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
        return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
                                             NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
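/* Illustrative note (added, not in the original sources): for a call such
   as

       void caller (A *a) { a->foo (); }

   the call target is an OBJ_TYPE_REF whose OBJ_TYPE_REF_OBJECT is the SSA
   name of A and whose OBJ_TYPE_REF_TOKEN is the vtable slot index of foo.
   When the object is (an ancestor part of) a formal parameter, the code
   above records a polymorphic call note so that IPA-CP can later try to
   devirtualize the call in callers where the dynamic type is known.  */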
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
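/* Example (added for exposition): in

       static void f (void (*cb) (void)) { cb (); cb (); }

   every use of CB is a call statement, so its controlled-uses count is 2;
   had CB also escaped (been stored to memory, compared, etc.), the count
   would become IPA_UNDESCRIBED_USE.  A finite count lets later passes know
   every place where a propagated function address is consumed.  */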
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
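/* Note (added for exposition): the per-node analysis above is organized so
   that expensive alias-analysis queries can be shared.  Call graph edges
   are first bucketed by the basic block of their call statement, and the
   single dominator walk then both records parameter uses and builds jump
   functions, allowing param_aa_status results computed in dominating
   blocks to be reused in dominated ones.  */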
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     do not.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }
		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
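/* Worked example (illustrative, not from the sources): suppose E passes an
   ancestor jump function of its caller's formal 0 with offset 8 (a cast to
   a base living at offset 8), and the inlined edge CS in turn describes
   that formal with an ancestor jump function of offset 16.  After inlining,
   the combined jump function is an ancestor of CS's formal with offset
   8 + 16 = 24, which is exactly what the
   dst->value.ancestor.offset += src->value.ancestor.offset update above
   implements.  */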
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated "
		     "to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i); this agrees with previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast <cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
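/* Usage sketch (illustrative, not from the sources): if a caller passes a
   struct whose field at byte offset 8 always holds &some_fn, the aggregate
   jump function carries an item with offset 64 (in bits) and value
   &some_fn.  An indirect call that loads its callee from that field has
   indirect_info->offset == 64, so the lookup above returns the ADDR_EXPR
   and the call can be made direct.  */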
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}

/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
	      (ie->indirect_info->otr_type,
	       ie->indirect_info->otr_token,
	       ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	    (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d;
}
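/* For instance, combining counts 2 and 3 yields 5, while combining
   anything with IPA_UNDESCRIBED_USE stays undescribed: a single untracked
   use makes the total unknowable.  */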
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
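/* Note (added for exposition): a reference description's refcount tracks
   how many described uses of a constant function address remain.  When the
   combination above drives it to zero, the corresponding IPA references,
   including the copies materialized on the chain of inline and IPA-CP
   clones, can be removed, which is what the walk over CLONE does.  */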
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
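/* Note (added for exposition): duplicates of a reference description are
   chained through next_duplicate so that, when an edge is duplicated again
   deeper in a tree of inline clones, the loop above can locate the rdesc
   that belongs to the same inline root instead of wrongly reusing one
   whose edge lives in a different clone tree.  */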
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
			      ipa_node_params *old_info,
			      ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of the function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well...  */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
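/* Illustrative example (hypothetical function, not from the sources): for

       static int f (struct S *p, int unused) { return p->x; }

   an adjustment vector containing one non-copy entry for parameter 0
   (type int, offset of field X, by_ref false) and no entry for the unused
   parameter rewrites the declaration to something like

       static int f (int SYNTH_x);

   replacing the aggregate pointer with its scalar reduction and dropping
   the dead parameter.  Matching call sites are rewritten by
   ipa_modify_call_arguments below.  */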
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
		 a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
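/* Continuing the example above (illustrative): a call f (&s, 0) becomes
   f (s.x): for the non-copy adjustment the code computes the base &s plus
   the byte offset of X, folds them into a MEM_REF, loads that into a new
   SSA name before the call where needed, and finally builds a replacement
   GIMPLE call with the shortened argument vector while preserving the lhs,
   location, and virtual operands of the original statement.  */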
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of
   the current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail out on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
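
/* An illustrative walk-through with made-up numbers: for a load such as
   p_2(D)->f, where field F lives at bit offset 32,
   get_ref_base_and_extent produces the SSA name p_2(D) and offset 32,
   and get_ssa_base_param maps the default def back to the PARM_DECL of
   P.  An adjustment entry with that base and offset 32 whose op is
   neither IPA_PARM_OP_COPY nor IPA_PARM_OP_REMOVE is then returned, and
   its new_decl is what ipa_modify_expr substitutes for the memory
   reference.  */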
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
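
/* A worked example with made-up indices: let INNER describe a clone of
   foo (int a, int b, int c) that removed B, i.e.

     inner = { {base_index 0, COPY}, {base_index 1, REMOVE},
	       {base_index 2, COPY} }

   TMP then holds only the surviving entries for A and C, so an OUTER
   entry whose base_index is 1 is looked up as TMP[1] and therefore
   refers to C of the original signature.  As the code above shows, two
   copies compose to a copy, a copy composed with a reduction keeps the
   reduction's offset, and two reductions add their offsets.  */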
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
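
/* Given the format string above, a chain that replaces the value at
   offset 8 within the aggregate of parameter 0 with 42 and the value at
   offset 16 of parameter 2 with 7 is dumped as:

     Aggregate replacements: 0[8]=42, 2[16]=7  */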
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
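
/* A sketch of the stream produced above for one jump function (record
   order only; the encodings are whatever the uhwi and bitpack streamer
   routines use):

     1. type (uhwi)
     2. type-specific payload, e.g. formal_id and an agg_preserved bit
	for a simple pass-through
     3. number of aggregate items (uhwi)
     4. by_ref bit (only when there are items)
     5. the items themselves, as (offset, value tree) pairs
     6. alignment.known bit
     7. align and misalign (uhwis, only when the bit is set)

   ipa_read_jump_function below consumes exactly this sequence.  */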
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
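
/* The per-edge count written above is a small parity trick: it stores
   ipa_get_cs_argument_count (args) * 2 and sets the low bit when
   polymorphic call contexts follow each jump function.  The matching
   decode, as done by ipa_read_node_info below, is simply

     count = streamer_read_uhwi (ib);
     contexts_computed = count & 1;	<- the low bit is the flag
     count /= 2;			<- the remaining bits are the count

   so no separate boolean needs to be streamed per edge.  */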
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								       data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for the nodes in the current LTO partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
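
/* The section just decoded uses the common LTO function-section framing;
   the offsets computed above correspond to this layout:

     0 ............... lto_function_header
     cfg_offset ...... CFG stream (unused by this reader)
     main_offset ..... main stream, read through IB_MAIN
     string_offset ... string table, attached to DATA_IN

   read_replacements_section further below decodes the
   LTO_section_ipcp_transform sections with the same framing.  */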
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Make sure the parameter and edge argument summaries exist so that
   later passes can query them.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Stream out the aggregate value replacement chain and the parameter
   alignment information for NODE to OB.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain and the parameter
   alignment information for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
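
/* A worked example with a made-up bitmap: if replacements exist for
   original parameters 0, 1 and 3 and combined_args_to_skip is {1}, the
   map built above is

     adj[0] = 0, adj[1] = -1, adj[2] = 1, adj[3] = 2

   so the replacement for parameter 3 is re-indexed to 2 to match the
   clone's shorter parameter list, while the slot for the skipped
   parameter 1 maps to -1.  */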
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}
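
/* For illustration (SSA names and values made up): with an aggregate
   replacement value of 42 recorded for offset 32 of parameter P, the
   walker above rewrites

     tmp_5 = p_1(D)->f;

   into

     tmp_5 = 42;

   provided the by_ref flag and the size of the replacement's type match
   the load; when the types differ but are convertible, a NOP_EXPR or a
   same-size VIEW_CONVERT_EXPR is folded around the constant first.  */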
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}