/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-utils.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to DESCRIPTORS, or -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO, or -1 if it is not found.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the param number I of the function corresponding to INFO to FILE.
   Note there is no setter function as the descriptor array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a know-nothing (IPA_JF_UNKNOWN) jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
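/* A minimal sketch of when a simple pass-through arises (an illustration, not
   from the original source): for a caller like

     void caller (int a) { callee (a); }

   the argument of the call is exactly formal #0 of the caller, so the jump
   function is a pass-through with formal_id 0 and operation NOP_EXPR.  */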
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
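/* Similarly, an arithmetic pass-through (illustration only, not from the
   original source) describes a call such as

     void caller (int a) { callee (a + 4); }

   where the jump function records formal_id 0, operation PLUS_EXPR and
   operand 4, letting the callee's value be computed from the caller's.  */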
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
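/* As an illustration of the assumptions above (not from the original source),
   a constructor compiled from C++ code like

     struct A { virtual void f (); };
     struct B : A { virtual void f (); B (); };
     B::B () { use (this); }

   is assumed to 1) call A::A first, 2) store the VMT pointer(s) for B, and
   only then 3) run user code such as the call to use.  */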
static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   ...
	 }  */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
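/* For reference (an assumption based on the usual Itanium C++ ABI layout, not
   part of the original source), the record matched above typically looks like

     struct
     {
       void (T::*__pfn) ();   // method pointer or vtable index
       ptrdiff_t __delta;     // adjustment of the this pointer
     };
*/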
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
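/* An illustration (not from the original source): for a sequence like

     s.f1 = 1;
     s.f2 = 2;
     foo (&s);

   the list would contain two entries ordered by offset, each recording the
   offset and size of the stored field and the stored constant.  */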
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
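/* A hypothetical example of what the function above recognizes (not from the
   original source):

     struct S { int a, b; };
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   Walking backwards from the call, the two stores are recorded and become
   constant aggregate items of the jump function at their offsets.  */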
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      gcc_checking_assert (align != 0);
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi,
				   basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
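/* A minimal sketch of how an IPA pass drives this analysis (illustrative,
   hedged; the real summary-generation callbacks in ipa-cp.c and the inliner
   do additional bookkeeping around it):

     ipa_register_cgraph_hooks ();
     struct cgraph_node *node;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       ipa_analyze_node (node);

   After this, every analyzed call edge carries jump functions and every node
   carries parameter descriptors and use flags that the propagation phases
   below consume.  */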
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
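/* Worked example (illustrative): suppose bar is inlined into foo at a call
   site that passes foo's parameter 0 straight through, i.e. the foo->bar
   edge has a simple pass-through jump function for argument 0.  If bar
   contains a call baz (x) where x is bar's parameter 0, the foo->baz edge
   that survives inlining gets its pass-through formal_id remapped from bar's
   parameter 0 to foo's parameter 0, so lattices computed for foo keep
   describing the argument of baz.  Ancestor jump functions compose
   similarly, with their offsets added together.  */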
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be external,
     we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* It is better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) but the call is already "
		     "speculated to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i); this agrees with previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
     }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
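/* For illustration, a hypothetical caller looking for the known constant
   stored at byte 8 of an aggregate passed by reference would do roughly
   (offsets are expressed in bits here, matching the rest of ipa-prop):

     tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					  8 * BITS_PER_UNIT, true);

   and get back the recorded IP-invariant value, or NULL_TREE when no item
   with that exact offset exists or the by-ref/by-value kind does not
   match.  */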
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
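/* Worked example: a parameter with two controlled uses in the caller (c = 2),
   one of which is the argument of the call being inlined, whose callee has
   three controlled uses of the corresponding parameter (d = 3), ends up with
   2 + 3 - 1 = 4 controlled uses: the use that was the inlined call argument
   disappears and is replaced by the callee's three uses.  If either count is
   IPA_UNDESCRIBED_USE, the result stays undescribed.  */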
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
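/* Illustrative sketch (hypothetical values): to rewrite

     int f (struct S *p, int unused);

   so that only the scalar piece p->x is passed and the second parameter is
   dropped, a caller of this API would build two adjustments: one with
   base_index 0 carrying the type and bit offset of S::x, whose op is neither
   IPA_PARM_OP_COPY nor IPA_PARM_OP_REMOVE (describing the new reduced
   parameter), and one with base_index 1 and op IPA_PARM_OP_REMOVE.  Passing
   that vector to ipa_modify_formal_parameters rewrites FNDECL's type and
   DECL_ARGUMENTS; every call site must then be rewritten with
   ipa_modify_call_arguments below.  */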
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple *tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple *def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of
   the current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    {
      src = build_simple_mem_ref (cand->new_decl);
      REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
    }
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
                         ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        removals++;
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (n->op != IPA_PARM_OP_NEW);
          tmp.quick_push (*n);
        }
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
        {
          if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
            {
              r.op = IPA_PARM_OP_REMOVE;
              adjustments.quick_push (r);
            }
          continue;
        }
      else
        {
          /* FIXME: Handling of new arguments is not implemented yet.  */
          gcc_assert (out->op != IPA_PARM_OP_NEW);
        }

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
        r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
        r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
        r.offset = in->offset;
      else
        r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
        adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
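
/* Illustrative sketch (commentary only, hypothetical vectors): if INNER
   copies parameters 0 and 1 and removes parameter 2, and OUTER, expressed
   against the post-INNER signature, removes its parameter 1, the combined
   vector copies original parameter 0 and removes original parameters 1
   and 2: the same signature as applying INNER first and OUTER second.  */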
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
                            tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
        fprintf (file, "                 ");
      else
        first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
        {
          fprintf (file, ", base: ");
          print_generic_expr (file, adj->base, 0);
        }
      if (adj->new_decl)
        {
          fprintf (file, ", new_decl: ");
          print_generic_expr (file, adj->new_decl, 0);
        }
      if (adj->new_ssa_base)
        {
          fprintf (file, ", new_ssa_base: ");
          print_generic_expr (file, adj->new_ssa_base, 0);
        }

      if (adj->op == IPA_PARM_OP_COPY)
        fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
        fprintf (file, ", remove_param");
      else
        fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
        fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
               av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
        EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
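
/* Illustrative sketch (commentary only) of the record layout produced by
   the writes above:

     uhwi     jump function type
     ...      per-type payload (e.g. the tree for IPA_JF_CONST)
     uhwi     number of aggregate items
     bitpack  agg.by_ref (present only when the item count is non-zero)
     { uhwi offset; tree value; }  once per aggregate item
     bitpack  alignment.known
     uhwi     align and uhwi misalign (only when alignment is known)

   ipa_read_jump_function below must consume the fields in exactly this
   order.  */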
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct cgraph_edge *cs,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
        ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
        break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
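
/* Illustrative sketch (commentary only): bitpack_create and bp_pack_value
   pack the five single-bit flags above into one word of the main stream,
   so the whole flag set costs a few bytes rather than five separate
   integers.  The reader below has to unpack the same number of bits in
   the same order, otherwise every subsequent field would be
   misinterpreted.  */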
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
                           ipa_get_cs_argument_count (args) * 2
                           + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        {
          ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
          if (args->polymorphic_call_contexts != NULL)
            ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
        }
      ipa_write_indirect_edge_info (ob, e);
    }
}
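
/* Illustrative sketch (commentary only): the per-edge header multiplexes
   the argument count and the presence of polymorphic call contexts into a
   single uhwi.  An edge with three arguments is announced as
   3 * 2 + 1 = 7 when contexts are streamed and as 6 when they are not;
   the reader recovers the flag as count & 1 and the real count as
   count / 2.  */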
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
        continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
        vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        {
          ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                  data_in);
          if (contexts_computed)
            ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
                                                                      data_in);
        }
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
          if (contexts_computed)
            vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            {
              ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
                                      data_in);
              if (contexts_computed)
                ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
                                                                          data_in);
            }
        }
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in the current partition.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data
        = lto_get_section_data (file_data, LTO_section_jump_functions,
                                NULL, &len);

      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get a mismatch in argument counts.
   Also, decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
/* Stream out the aggregate value replacement chain and the parameter
   alignment information for NODE to OB.  */

static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
        {
          ipa_alignment *parm_al = &(*ts->alignments)[i];

          struct bitpack_d bp;
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, parm_al->known, 1);
          streamer_write_bitpack (&bp);
          if (parm_al->known)
            {
              streamer_write_uhwi (ob, parm_al->align);
              streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
                                           parm_al->misalign);
            }
        }
    }
  else
    streamer_write_uhwi (ob, 0);
}
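
/* Illustrative sketch (commentary only): because a misalignment is always
   smaller than the alignment it accompanies, it is streamed with
   streamer_write_hwi_in_range constrained to [0, align].  A parameter with
   a known alignment of 16 and misalignment 8 is thus written as the uhwi
   16 followed by 8 encoded within that range.  */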
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
                               data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
        {
          ipa_alignment *parm_al;
          parm_al = &(*ts->alignments)[i];
          struct bitpack_d bp;
          bp = streamer_read_bitpack (ib);
          parm_al->known = bp_unpack_value (&bp, 1);
          if (parm_al->known)
            {
              parm_al->align = streamer_read_uhwi (ib);
              parm_al->misalign
                = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
                                              0, parm_al->align);
            }
        }
    }
}
/* Write all aggregate replacements for nodes in the current partition.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
        write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
                           header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_ipcp_transform,
                                               NULL, &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
                               struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
        c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
        adj[i] = -1;
        d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
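
/* Illustrative sketch (commentary only, hypothetical bitmap contents):
   with four original parameters and combined_args_to_skip containing just
   bit 1, the loop above builds adj = { 0, -1, 1, 2 }, so a replacement
   recorded against original parameter 2 is retargeted to parameter 1 of
   the clone.  */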
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
                         vec<ipa_param_descriptor> descs,
                         struct ipa_agg_replacement_value *av,
                         bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
        continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
        continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
        {
          /* V_C_E can do things like convert an array of integers to one
             bigger integer and similar things we do not handle below.  */
          if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
            {
              vce = true;
              break;
            }
          t = TREE_OPERAND (t, 0);
        }
      if (vce)
        continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
                                   &offset, &size, &by_ref))
        continue;
      for (v = m_aggval; v; v = v->next)
        if (v->index == index
            && v->offset == offset)
          break;
      if (!v
          || v->by_ref != by_ref
          || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
        continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
        {
          if (fold_convertible_p (TREE_TYPE (rhs), v->value))
            val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
          else if (TYPE_SIZE (TREE_TYPE (rhs))
                   == TYPE_SIZE (TREE_TYPE (v->value)))
            val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
          else
            {
              if (dump_file)
                {
                  fprintf (dump_file, "    const ");
                  print_generic_expr (dump_file, v->value, 0);
                  fprintf (dump_file, "  can't be converted to type of ");
                  print_generic_expr (dump_file, rhs, 0);
                  fprintf (dump_file, "\n");
                }
              continue;
            }
        }
      else
        val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Modifying stmt:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
        }
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "into:\n  ");
          print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "\n");
        }

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
          && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
        *m_cfg_changed = true;
    }
  return NULL;
}
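
/* Illustrative sketch (commentary only, hypothetical GIMPLE): if IPA-CP
   recorded that the aggregate pointed to by parameter P always holds 4 at
   bit offset 32, the walker above rewrites a load such as

     x_1 = MEM[(const struct s *) p_2(D) + 4B];

   into

     x_1 = 4;

   purging EH edges afterwards if the replaced load can no longer
   throw.  */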
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
        continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
        continue;

      if (dump_file)
        fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
                 "misalignment to %u\n", i, alignments[i].align,
                 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
      if (old_known
          && old_align >= alignments[i].align)
        {
          if (dump_file)
            fprintf (dump_file, "    But the alignment was already %u.\n",
                     old_align);
          continue;
        }
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}