/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the IPA edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};
/* Allocation pool for reference descriptions.  */
static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
    || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;

  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump information about the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   descriptors array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
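
/* Editor's note, illustrative only (not part of the original sources): the
   dumps above end up in the IPA dump files.  For example, compiling with

     gcc -O2 -fipa-cp -fdump-ipa-cp-details test.c

   should produce a test.c.*.cp dump file whose "Jump functions:" section is
   printed by ipa_print_all_jump_functions.  */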
/* Set JFUNC to be an unknown jump function that carries no information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
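
/* Illustrative example (not from the original sources; CALLEE and the
   argument names are hypothetical): given a caller such as

     void caller (int i, struct S *s)
     {
       callee (i, i + 4, &s->inner);
     }

   the setters above would typically record a simple pass-through jump
   function for the first argument, an arithmetic pass-through (PLUS_EXPR
   with operand 4) for the second, and an ancestor jump function (with the
   bit offset of INNER within S) for the third.  */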
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
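
/* Illustrative sketch (not from the original sources) of the constructor
   shape assumed by the comment above, in roughly C++-like pseudocode:

     Derived::Derived ()
     {
       Base::Base (this);                  // section 1: ancestor constructors
       this->vptr = &vtable_for_Derived;   // section 2: VMT pointers stored
       this->member.init ();               // section 3: user code; only here
       this->do_something ();              //   may virtual calls happen
     }
*/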
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is PARAM_DECL describing instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
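
/* Illustrative example (not from the original sources) of the dynamic type
   change that the detect_type_change* routines guard against:

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void g (A *a)
     {
       a->~A ();
       new (a) B;   // stores a new VMT pointer into *a
       a->f ();     // must not be devirtualized using the old type of *a
     }
*/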
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
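
/* Note, illustrative only (not part of the original sources): the walk
   budget tested above is user-tunable, e.g.

     gcc -O2 --param ipa-max-aa-steps=50000 test.c

   raises the number of alias-analysis statement walks IPA is willing to
   perform per function before giving up.  */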
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1,  i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
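
/* Illustrative example (not from the original sources): under the Itanium
   C++ ABI, the member pointer record matched above looks roughly like

     struct {
       void (T::*__pfn) ();   // method address, or vtable offset + 1 for
                              // virtual methods
       ptrdiff_t __delta;     // adjustment to apply to THIS
     };

   which is what type_like_member_ptr_p checks for field by field.  */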
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
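
/* Illustrative example (not from the original sources): for a caller such as

     struct S s;
     s.a = 1;
     s.b = 2;
     callee (&s);

   the backward walk above finds the two stores, records the constants 1 and
   2 at their offsets in the linked list, and encodes them in the aggregate
   jump function of the argument.  */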
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &hwi_bitpos);
	  if (align > BITS_PER_UNIT
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg)
	  || (TREE_CODE (arg) == VAR_DECL
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get type from function declaration, but in case
	 of K&R prototypes or variadic functions we can try our luck with type
	 of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */
static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters
   (described in FBI->info) of the caller given by FBI->node.  Currently it
   only checks whether formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load, visit_store
   and visit_addr hooks.  If OP is a parameter declaration, mark it as used
   in the info structure passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
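/* Editor's illustration (hypothetical source): a parameter has a known
   number of "controlled" uses when every use of its default definition is a
   call, as in

     static int wrap (int (*cb) (int)) { return cb (1) + cb (2); }

   Here CB gets controlled_uses == 2, whereas storing CB into memory or doing
   arithmetic on it would force IPA_UNDESCRIBED_USE.  */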
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
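/* Editor's worked example (hypothetical functions): suppose bar (x) calls
   baz (&x->field), so the bar->baz edge carries an ancestor jump function
   with the offset of FIELD, while foo (p) calls bar (p) with a simple
   pass-through.  When the bar->baz edge is updated after inlining bar into
   foo, the ancestor function is re-based onto foo's parameter P: the
   formal_id is taken from the pass-through and the agg_preserved flags are
   ANDed, exactly as done in the IPA_JF_ANCESTOR branch above.  */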
/* If TARGET is an addr_expr of a function declaration, make it the
   (speculative) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) but the call is already "
		     "speculated to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i); this agrees with previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
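/* Editor's illustration (hypothetical source): given

     static int (* const table[2]) (int) = { f, g };

   a request for the constant at offset 0 of DECL_INITIAL (table) walks the
   CONSTRUCTOR elements, computes each element's bit offset from
   TYPE_SIZE_UNIT, and returns the ADDR_EXPR of f; nested structures are
   handled by the recursive call above.  */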
/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (TREE_CODE (scalar) != VAR_DECL
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}
/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;
  tree scalar = ipa_value_from_jfunc (new_root_info, jfunc);
  if (agg_contents)
    {
      bool from_global_constant;
      target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   ie->indirect_info->by_ref,
					   &from_global_constant);
      if (target
	  && !from_global_constant
	  && !ie->indirect_info->guaranteed_unmodified)
	return NULL;
    }
  else
    target = scalar;
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
	: NULL;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
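/* Editor's note on the lookup above: when the jump function carries the
   aggregate constant sitting where the vptr lives (e.g. the caller passed an
   object of statically known type), vtable_pointer_value_to_vtable recovers
   the vtable and offset, and gimple_get_virt_method_for_vtable yields the
   method for otr_token, so a call like a->foo () can be made direct; when
   only the speculative paths apply, the edge is made speculative instead.  */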
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
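/* Editor's example: if a formal had c == 2 controlled uses in the caller and
   the corresponding actual argument had d == 3 controlled uses in the inlined
   callee, the one use that was the inlined call itself disappears, giving
   c + d - 1 == 4; any IPA_UNDESCRIBED_USE operand is sticky.  */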
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
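/* Editor's note on the duplication logic above: ref descriptions form a
   chain through next_duplicate.  If edge E with rdesc R is cloned for an
   inline clone, the clone gets a fresh rdesc R' with
   R->next_duplicate == R', and a later duplication inside that clone's tree
   must find R' (the duplicate whose tree of inline clones matches
   dst->caller) rather than R, which is what the loop over next_duplicate
   does.  */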
/* Analyze a function newly added to the callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
			      ipa_node_params *old_info,
			      ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans
    = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print the ipa_node_params parameter descriptors of NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print the ipa_node_params parameter descriptors of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
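/* Editor's illustration (hypothetical signature): an adjustment vector of
   { {op=IPA_PARM_OP_COPY, base_index=0},
     {op=IPA_PARM_OP_REMOVE, base_index=1} }
   turns  int f (int a, int b)  into  int f (int a);  a reduction entry with
   by_ref set instead synthesizes a new pointer PARM_DECL, named with the
   "SYNTH" or adj->arg_prefix prefix, for the selected component.  */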
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple *tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple *def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter,
   do so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function
   will leave incompatibility issues to the caller.  Return true iff the
   expression was modified.  */
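/* Added commentary: this is the hook that the IPA-SRA modification phase in
   tree-sra.c applies to individual uses of parameters; e.g. a load from a
   reduced aggregate parameter is rewritten into a use of the replacement
   declaration recorded in the matching adjustment.  */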
bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    {
      src = build_simple_mem_ref (cand->new_decl);
      REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
    }
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }
  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless of whether it is a
   default def.  Return T if it is not an SSA_NAME.  */
static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */
ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }
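  /* Added commentary: the MEM_REF's byte offset is folded into OFFSET
     (converted to bits) above so that the comparison with each adjustment's
     bit offset below is carried out in a single unit.  */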
  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */
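/* Example (added commentary): if INNER removes the second of two parameters
   and OUTER simply copies the surviving one, the combined vector copies
   parameter 0 and retains the removal of parameter 1, both expressed against
   the original signature; the TMP vector below provides the mapping from
   OUTER's index space back to INNER's.  */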
ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;

  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */
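/* Added commentary: the layout written here must stay in sync with
   ipa_read_jump_function below: a uhwi type tag, a type-specific payload,
   the vector of aggregate items preceded by its length (plus a by_ref bit
   when it is non-empty), and finally the alignment information.  */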
static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */
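/* Added commentary: the values below are unpacked in exactly the order in
   which ipa_write_indirect_edge_info packed them; bitpacks carry no field
   tags, so any reordering would silently corrupt the streamed summary.  */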
static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
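/* Added commentary: stream out the aggregate value replacement chain and the
   parameter alignment summary of NODE to OB.  */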
void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
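      /* Added commentary: entries are linked in at the head below, so the
	 in-memory chain ends up in reverse stream order; consumers scan the
	 whole list, so the order is irrelevant.  */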
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
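  /* Added commentary: the section data is released below under the
     LTO_section_jump_functions tag although it was obtained as
     LTO_section_ipcp_transform; this looks like a leftover from
     ipa_prop_read_section rather than a deliberate choice.  */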
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in NODE.  */
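/* Example (added commentary): if a clone skips parameter 1 of three, the
   mapping built below is {0, -1, 1}, so a replacement recorded for
   parameter 2 is renumbered to index 1, matching the clone's signature.  */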
static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */
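/* Added commentary: the known alignment is applied to the ptr_info of each
   parameter's default-def SSA name, so only parameters that are gimple
   registers are affected; arguments skipped by clones are stepped over to
   keep the indices in sync with the summary.  */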
static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */
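/* Added commentary: this is the per-function driver of the IPA-CP
   transformation phase.  Its return value is a TODO set for the pass
   manager: zero when nothing changed, TODO_update_ssa_only_virtuals after
   replacements, plus TODO_cleanup_cfg when EH cleanup modified the CFG.  */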
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}