/* Interprocedural analyses.
   Copyright (C) 2005-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "domwalk.h"
/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the parameter infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a jump function recording that we really know nothing
   about the argument.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
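
/* A typical VMT pointer store flagged by the function above is the
   assignment a constructor makes, which in GIMPLE looks like

     this_1(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];  */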
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (modified && paa)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (modified && paa)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor> descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (op, offset_p, &size, &max_size, &reverse);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   ...
	 }  */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters access different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size, &reverse);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size, &reverse);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1,  i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before the
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size, &reverse);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size, &reverse);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size, &reverse);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      gcc_checking_assert (align != 0);
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
   containing intermediate information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}
/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We had better ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) but the call is already "
		     "speculated to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i); this agrees with the previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
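/* A standalone sketch (not GCC code) of the lookup discipline above: an
   aggregate jump function is a flat list of (offset, value) items plus a
   by_ref flag, and a query only matches when both the flag and the exact
   offset agree.  All example_* names are hypothetical.  */
#if 0
#include <stdbool.h>
#include <stddef.h>

struct example_item { long offset; int value; };

struct example_agg
{
  bool by_ref;
  size_t n_items;
  const struct example_item *items;
};

/* Return a pointer to the value known at OFFSET, or NULL.  */
static const int *
example_find (const struct example_agg *agg, long offset, bool by_ref)
{
  if (by_ref != agg->by_ref)
    return NULL;
  for (size_t i = 0; i < agg->n_items; i++)
    if (agg->items[i].offset == offset)
      return &agg->items[i].value;
  return NULL;
}
#endif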
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
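/* An illustrative standalone sketch (not GCC code) of the short-circuit
   chain above: the decrement only happens when the descriptor is usable,
   because && stops evaluating as soon as an earlier condition fails, and
   only the caller that drops the count to zero performs the cleanup.
   The example_* names are hypothetical.  */
#if 0
#include <assert.h>
#include <stddef.h>

struct example_desc { int refcount; };

static struct example_desc *
example_usable (struct example_desc *d)
{
  return (d && d->refcount >= 0) ? d : NULL;
}

int
main (void)
{
  struct example_desc desc = { 2 };
  struct example_desc *r;

  /* First call decrements 2 -> 1; the == 0 cleanup branch is skipped.  */
  if ((r = example_usable (&desc)) && --r->refcount == 0)
    assert (0);
  /* Second call decrements 1 -> 0; now the cleanup branch runs.  */
  if ((r = example_usable (&desc)) && --r->refcount == 0)
    assert (r->refcount == 0);
  return 0;
}
#endif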
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  bool can_refer;
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset, &can_refer);
	  if (can_refer)
	    {
	      if (!t
		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		  else
		    target = NULL;
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target
	   && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
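/* A standalone illustration (not GCC code) of the combination rule above:
   IPA_UNDESCRIBED_USE acts as an absorbing "unknown", and otherwise the
   counts add up minus one, because the call site being inlined was itself
   one of the counted uses.  The example_* names are hypothetical.  */
#if 0
#include <assert.h>

#define EXAMPLE_UNDESCRIBED (-1)

static int
example_combine (int c, int d)
{
  if (c == EXAMPLE_UNDESCRIBED || d == EXAMPLE_UNDESCRIBED)
    return EXAMPLE_UNDESCRIBED;
  return c + d - 1;
}

int
main (void)
{
  /* Caller had 2 described uses, callee had 3; the inlined call site
     itself no longer counts, leaving 4.  */
  assert (example_combine (2, 3) == 4);
  /* Unknown on either side poisons the result.  */
  assert (example_combine (2, EXAMPLE_UNDESCRIBED) == EXAMPLE_UNDESCRIBED);
  return 0;
}
#endif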
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				      == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_csts.release ();
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}
/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
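/* A minimal standalone sketch (not GCC code) of the grow-then-index idiom
   above: before indexing a vector by an allocator-issued uid, the vector is
   grown (zero-filled) to cover the largest uid handed out so far.  The
   example_* names are hypothetical and error handling is elided.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct example_vec { size_t len; int *data; };

static void
example_grow_cleared (struct example_vec *v, size_t len)
{
  if (v->len >= len)
    return;
  v->data = realloc (v->data, len * sizeof (int));
  memset (v->data + v->len, 0, (len - v->len) * sizeof (int));
  v->len = len;
}

static void
example_set (struct example_vec *v, size_t uid, int value)
{
  example_grow_cleared (v, uid + 1);  /* uid + 1 slots cover index uid.  */
  v->data[uid] = value;
}
#endif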
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.).
     The exception is METHOD_TYPEs, which must have a THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      /* If expr is not a valid gimple call argument emit
		 a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple *tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      REF_REVERSE_STORAGE_ORDER (expr) = adj->reverse;
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple *def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
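/* An aside, not part of GCC: the "misalign & -misalign" step above is the
   standard lowest-set-bit trick, which yields the largest power of two that
   still divides the misaligned offset.  A standalone sketch with
   hypothetical example_* names.  */
#if 0
#include <assert.h>

static unsigned int
example_align_from_misalign (unsigned int align, unsigned int misalign)
{
  misalign &= align - 1;          /* Reduce modulo the known alignment.  */
  if (misalign != 0)
    align = misalign & -misalign; /* Largest power of two dividing it.  */
  return align;
}

int
main (void)
{
  /* A 16-byte-aligned base offset by 4 bytes is only 4-byte aligned.  */
  assert (example_align_from_misalign (16, 4) == 4);
  /* Offset by 12 = 4 * 3: still 4-byte aligned.  */
  assert (example_align_from_misalign (16, 12) == 4);
  /* No misalignment keeps the original alignment.  */
  assert (example_align_from_misalign (16, 32) == 16);
  return 0;
}
#endif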
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility of
   the current and new expressions.  If it is false, the function will leave
   incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    {
      src = build_simple_mem_ref (cand->new_decl);
      REF_REVERSE_STORAGE_ORDER (src) = cand->reverse;
    }
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  bool reverse;
  tree base
    = get_ref_base_and_extent (**expr, &offset, &size, &max_size, &reverse);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME: Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
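
/* Editorial worked example (added by the editor, not in the original
   sources): suppose INNER leaves original parameter 0 as a COPY and turns
   original parameter 1 into a reduction at bit offset 64, and OUTER then
   reduces the intermediate function's parameter 1 further, at bit offset 32.
   The composed entry keeps base_index 1 and, since neither adjustment is a
   COPY, takes the summed offset 64 + 32 = 96, per the arithmetic above.  */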
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
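
/* Editorial summary (added by the editor): the record written above is, in
   order: a type tag (uhwi); the type-specific payload; the aggregate item
   count (uhwi); if the count is non-zero, a one-bit by_ref bitpack followed
   by (offset, tree) pairs; and finally a one-bit alignment.known bitpack,
   optionally followed by align and misalign (one uhwi each).
   ipa_read_jump_function below consumes the fields in exactly this order,
   which is what keeps the two functions in sync.  */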
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
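
/* Editorial note (added by the editor): the per-edge count word written
   above doubles the real argument count and uses the low bit to flag the
   presence of polymorphic call contexts.  A reader therefore decodes it as
   sketched below, which mirrors what ipa_read_node_info does.  */
#if 0
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;	/* Low bit: contexts present.  */
  count /= 2;				/* Remaining bits: argument count.  */
#endif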
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;

      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
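
/* Editorial sketch (added by the editor): an LTO function section as parsed
   above is laid out as

     [lto_function_header][cfg (cfg_size)][main stream (main_size)][strings]

   so cfg_offset = sizeof (header), main_offset = cfg_offset + cfg_size and
   string_offset = main_offset + main_size, matching the offset arithmetic
   in ipa_prop_read_section and in read_replacements_section below.  */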
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get mismatches in argument counts.
   Also decl merging might have rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in SET.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  /* Free with the section tag the data was obtained under; the original
     freed it as LTO_section_jump_functions, which looks like a copy-paste
     slip since this data came from LTO_section_ipcp_transform.  */
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
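
/* Editorial worked example (added by the editor, not in the original
   sources): with four original parameters and combined_args_to_skip =
   {1, 2}, the remapping array built above is adj = {0, -1, -1, 1}; a
   replacement recorded for original index 3 is therefore retargeted to
   surviving index 1.  Skipped indices are assumed never to occur in
   AGGVAL, so the -1 entries are never read back.  */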
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* A VIEW_CONVERT_EXPR can do things like convert an array of
	     integers to one bigger integer and similar things we do not
	     handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
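
/* Editorial sketch (hypothetical, not part of GCC): how the (align,
   misalign) pair set above is interpreted by a consumer, using only the
   ptr_info accessors already used in this file.  use_alignment is an
   invented placeholder.  */
#if 0
  unsigned align, misalign;
  struct ptr_info_def *pi = get_ptr_info (ssa_name);
  if (get_ptr_info_alignment (pi, &align, &misalign))
    /* The pointer value is known to equal MISALIGN modulo ALIGN.  */
    use_alignment (align, misalign);
#endif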
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}