1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
29 #include "gimple-expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
43 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
59 #include "ipa-utils.h"
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
64 struct param_analysis_info
66 bool parm_modified
, ref_modified
, pt_modified
;
67 bitmap parm_visited_statements
, pt_visited_statements
;
70 /* Vector where the parameter infos are actually stored. */
71 vec
<ipa_node_params
> ipa_node_params_vector
;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
74 /* Vector where the parameter infos are actually stored. */
75 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
77 /* Holders of ipa cgraph hooks: */
78 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
79 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
80 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
81 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
82 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge
*cs
;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc
*next_duplicate
;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
96 /* Allocation pool for reference descriptions. */
98 static alloc_pool ipa_refdesc_pool
;
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
106 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
107 struct cl_optimization
*os
;
111 os
= TREE_OPTIMIZATION (fs_opts
);
112 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
115 /* Return index of the formal whose tree is PTREE in function which corresponds
119 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
123 count
= descriptors
.length ();
124 for (i
= 0; i
< count
; i
++)
125 if (descriptors
[i
].decl
== ptree
)
131 /* Return index of the formal whose tree is PTREE in function which corresponds
135 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
137 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
140 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
144 ipa_populate_param_decls (struct cgraph_node
*node
,
145 vec
<ipa_param_descriptor
> &descriptors
)
153 gcc_assert (gimple_has_body_p (fndecl
));
154 fnargs
= DECL_ARGUMENTS (fndecl
);
156 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
158 descriptors
[param_num
].decl
= parm
;
159 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params (tree fndecl
)
171 gcc_assert (gimple_has_body_p (fndecl
));
173 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
179 /* Return the declaration of Ith formal parameter of the function corresponding
180 to INFO. Note there is no setter function as this array is built just once
181 using ipa_initialize_node_params. */
184 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
186 fprintf (file
, "param #%i", i
);
187 if (info
->descriptors
[i
].decl
)
190 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
198 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
200 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
202 if (!info
->descriptors
.exists () && param_count
)
203 info
->descriptors
.safe_grow_cleared (param_count
);
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
211 ipa_initialize_node_params (struct cgraph_node
*node
)
213 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
215 if (!info
->descriptors
.exists ())
217 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
218 ipa_populate_param_decls (node
, info
->descriptors
);
222 /* Print the jump functions associated with call graph edge CS to file F. */
225 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
229 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
230 for (i
= 0; i
< count
; i
++)
232 struct ipa_jump_func
*jump_func
;
233 enum jump_func_type type
;
235 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
236 type
= jump_func
->type
;
238 fprintf (f
, " param %d: ", i
);
239 if (type
== IPA_JF_UNKNOWN
)
240 fprintf (f
, "UNKNOWN\n");
241 else if (type
== IPA_JF_KNOWN_TYPE
)
243 fprintf (f
, "KNOWN TYPE: base ");
244 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
245 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
246 jump_func
->value
.known_type
.offset
);
247 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
250 else if (type
== IPA_JF_CONST
)
252 tree val
= jump_func
->value
.constant
.value
;
253 fprintf (f
, "CONST: ");
254 print_generic_expr (f
, val
, 0);
255 if (TREE_CODE (val
) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
259 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
264 else if (type
== IPA_JF_PASS_THROUGH
)
266 fprintf (f
, "PASS THROUGH: ");
267 fprintf (f
, "%d, op %s",
268 jump_func
->value
.pass_through
.formal_id
,
269 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
270 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
273 print_generic_expr (f
,
274 jump_func
->value
.pass_through
.operand
, 0);
276 if (jump_func
->value
.pass_through
.agg_preserved
)
277 fprintf (f
, ", agg_preserved");
278 if (jump_func
->value
.pass_through
.type_preserved
)
279 fprintf (f
, ", type_preserved");
282 else if (type
== IPA_JF_ANCESTOR
)
284 fprintf (f
, "ANCESTOR: ");
285 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
286 jump_func
->value
.ancestor
.formal_id
,
287 jump_func
->value
.ancestor
.offset
);
288 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
289 if (jump_func
->value
.ancestor
.agg_preserved
)
290 fprintf (f
, ", agg_preserved");
291 if (jump_func
->value
.ancestor
.type_preserved
)
292 fprintf (f
, ", type_preserved");
296 if (jump_func
->agg
.items
)
298 struct ipa_agg_jf_item
*item
;
301 fprintf (f
, " Aggregate passed by %s:\n",
302 jump_func
->agg
.by_ref
? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
305 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
307 if (TYPE_P (item
->value
))
308 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
309 tree_to_uhwi (TYPE_SIZE (item
->value
)));
312 fprintf (f
, "cst: ");
313 print_generic_expr (f
, item
->value
, 0);
322 /* Print the jump functions of all arguments on all call graph edges going from
326 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
328 struct cgraph_edge
*cs
;
330 fprintf (f
, " Jump functions of caller %s/%i:\n", node
->name (),
332 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
334 if (!ipa_edge_args_info_available_for_edge_p (cs
))
337 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node
->name ()), node
->order
,
339 xstrdup (cs
->callee
->name ()),
341 ipa_print_node_jump_functions_for_edge (f
, cs
);
344 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
346 struct cgraph_indirect_call_info
*ii
;
347 if (!ipa_edge_args_info_available_for_edge_p (cs
))
350 ii
= cs
->indirect_info
;
351 if (ii
->agg_contents
)
352 fprintf (f
, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
354 ii
->member_ptr
? "member ptr" : "aggregate",
355 ii
->param_index
, ii
->offset
,
356 ii
->by_ref
? "by reference" : "by_value");
358 fprintf (f
, " indirect %s callsite, calling param %i",
359 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
);
363 fprintf (f
, ", for stmt ");
364 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
368 ipa_print_node_jump_functions_for_edge (f
, cs
);
372 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
375 ipa_print_all_jump_functions (FILE *f
)
377 struct cgraph_node
*node
;
379 fprintf (f
, "\nJump functions:\n");
380 FOR_EACH_FUNCTION (node
)
382 ipa_print_node_jump_functions (f
, node
);
386 /* Set JFUNC to be a known type jump function. */
389 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
390 tree base_type
, tree component_type
)
392 gcc_assert (TREE_CODE (component_type
) == RECORD_TYPE
393 && TYPE_BINFO (component_type
));
394 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
395 jfunc
->value
.known_type
.offset
= offset
,
396 jfunc
->value
.known_type
.base_type
= base_type
;
397 jfunc
->value
.known_type
.component_type
= component_type
;
398 gcc_assert (component_type
);
401 /* Set JFUNC to be a copy of another jmp (to be used by jump function
402 combination code). The two functions will share their rdesc. */
405 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
406 struct ipa_jump_func
*src
)
409 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
410 dst
->type
= IPA_JF_CONST
;
411 dst
->value
.constant
= src
->value
.constant
;
414 /* Set JFUNC to be a constant jmp function. */
417 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
418 struct cgraph_edge
*cs
)
420 constant
= unshare_expr (constant
);
421 if (constant
&& EXPR_P (constant
))
422 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
423 jfunc
->type
= IPA_JF_CONST
;
424 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
426 if (TREE_CODE (constant
) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
429 struct ipa_cst_ref_desc
*rdesc
;
430 if (!ipa_refdesc_pool
)
431 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc
), 32);
434 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
436 rdesc
->next_duplicate
= NULL
;
438 jfunc
->value
.constant
.rdesc
= rdesc
;
441 jfunc
->value
.constant
.rdesc
= NULL
;
444 /* Set JFUNC to be a simple pass-through jump function. */
446 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
447 bool agg_preserved
, bool type_preserved
)
449 jfunc
->type
= IPA_JF_PASS_THROUGH
;
450 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
451 jfunc
->value
.pass_through
.formal_id
= formal_id
;
452 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
453 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
454 jfunc
->value
.pass_through
.type_preserved
= type_preserved
;
457 /* Set JFUNC to be an arithmetic pass through jump function. */
460 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
461 tree operand
, enum tree_code operation
)
463 jfunc
->type
= IPA_JF_PASS_THROUGH
;
464 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
465 jfunc
->value
.pass_through
.formal_id
= formal_id
;
466 jfunc
->value
.pass_through
.operation
= operation
;
467 jfunc
->value
.pass_through
.agg_preserved
= false;
468 jfunc
->value
.pass_through
.type_preserved
= false;
471 /* Set JFUNC to be an ancestor jump function. */
474 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
475 tree type
, int formal_id
, bool agg_preserved
,
478 jfunc
->type
= IPA_JF_ANCESTOR
;
479 jfunc
->value
.ancestor
.formal_id
= formal_id
;
480 jfunc
->value
.ancestor
.offset
= offset
;
481 jfunc
->value
.ancestor
.type
= type
;
482 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
483 jfunc
->value
.ancestor
.type_preserved
= type_preserved
;
486 /* Extract the acual BINFO being described by JFUNC which must be a known type
490 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
492 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
495 return get_binfo_at_offset (base_binfo
,
496 jfunc
->value
.known_type
.offset
,
497 jfunc
->value
.known_type
.component_type
);
500 /* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
503 struct type_change_info
505 /* Offset into the object where there is the virtual method pointer we are
507 HOST_WIDE_INT offset
;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type
;
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed
;
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered
;
521 /* Return true if STMT can modify a virtual method table pointer.
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
526 pointers and that constructors essentially look in the following way:
528 1) The very first thing they do is that they call constructors of ancestor
529 sub-objects that have them.
531 2) Then VMT pointers of this and all its ancestors is set to new values
532 corresponding to the type corresponding to the constructor.
534 3) Only afterwards, other stuff such as constructor of member sub-objects
535 and the code written by the user is run. Only this may include calling
536 virtual functions, directly or indirectly.
538 There is no way to call a constructor of an ancestor sub-object in any
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
545 The most important fact to derive from the above is that if, for some
546 statement in the section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop, we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
559 stmt_may_be_vtbl_ptr_store (gimple stmt
)
561 if (is_gimple_call (stmt
))
563 else if (gimple_clobber_p (stmt
))
565 else if (is_gimple_assign (stmt
))
567 tree lhs
= gimple_assign_lhs (stmt
);
569 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
571 if (flag_strict_aliasing
572 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
575 if (TREE_CODE (lhs
) == COMPONENT_REF
576 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
578 /* In the future we might want to use get_base_ref_and_offset to find
579 if there is a field corresponding to the offset and if so, proceed
580 almost like if it was a component ref. */
586 /* If STMT can be proved to be an assignment to the virtual method table
587 pointer of ANALYZED_OBJ and the type associated with the new table
588 identified, return the type. Otherwise return NULL_TREE. */
591 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
593 HOST_WIDE_INT offset
, size
, max_size
;
594 tree lhs
, rhs
, base
, binfo
;
596 if (!gimple_assign_single_p (stmt
))
599 lhs
= gimple_assign_lhs (stmt
);
600 rhs
= gimple_assign_rhs1 (stmt
);
601 if (TREE_CODE (lhs
) != COMPONENT_REF
602 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
605 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
606 if (offset
!= tci
->offset
607 || size
!= POINTER_SIZE
608 || max_size
!= POINTER_SIZE
)
610 if (TREE_CODE (base
) == MEM_REF
)
612 if (TREE_CODE (tci
->object
) != MEM_REF
613 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
614 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
615 TREE_OPERAND (base
, 1)))
618 else if (tci
->object
!= base
)
621 binfo
= vtable_pointer_value_to_binfo (rhs
);
623 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
624 base of outer type. In this case we would need to either
625 work on binfos or translate it back to outer type and offset.
626 KNOWN_TYPE jump functions are not ready for that, yet. */
627 if (!binfo
|| TYPE_BINFO (BINFO_TYPE (binfo
)) != binfo
)
630 return BINFO_TYPE (binfo
);
633 /* Callback of walk_aliased_vdefs and a helper function for
634 detect_type_change to check whether a particular statement may modify
635 the virtual table pointer, and if possible also determine the new type of
636 the (sub-)object. It stores its result into DATA, which points to a
637 type_change_info structure. */
640 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
642 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
643 struct type_change_info
*tci
= (struct type_change_info
*) data
;
645 if (stmt_may_be_vtbl_ptr_store (stmt
))
648 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
649 if (tci
->type_maybe_changed
650 && type
!= tci
->known_current_type
)
651 tci
->multiple_types_encountered
= true;
652 tci
->known_current_type
= type
;
653 tci
->type_maybe_changed
= true;
662 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
663 callsite CALL) by looking for assignments to its virtual table pointer. If
664 it is, return true and fill in the jump function JFUNC with relevant type
665 information or set it to unknown. ARG is the object itself (not a pointer
666 to it, unless dereferenced). BASE is the base of the memory access as
667 returned by get_ref_base_and_extent, as is the offset. */
670 detect_type_change (tree arg
, tree base
, tree comp_type
, gimple call
,
671 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
673 struct type_change_info tci
;
676 gcc_checking_assert (DECL_P (arg
)
677 || TREE_CODE (arg
) == MEM_REF
678 || handled_component_p (arg
));
679 /* Const calls cannot call virtual methods through VMT and so type changes do
681 if (!flag_devirtualize
|| !gimple_vuse (call
)
682 /* Be sure expected_type is polymorphic. */
684 || TREE_CODE (comp_type
) != RECORD_TYPE
685 || !TYPE_BINFO (comp_type
)
686 || !BINFO_VTABLE (TYPE_BINFO (comp_type
)))
689 ao_ref_init (&ao
, arg
);
692 ao
.size
= POINTER_SIZE
;
693 ao
.max_size
= ao
.size
;
696 tci
.object
= get_base_address (arg
);
697 tci
.known_current_type
= NULL_TREE
;
698 tci
.type_maybe_changed
= false;
699 tci
.multiple_types_encountered
= false;
701 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
703 if (!tci
.type_maybe_changed
)
706 if (!tci
.known_current_type
707 || tci
.multiple_types_encountered
709 jfunc
->type
= IPA_JF_UNKNOWN
;
711 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
716 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
717 SSA name (its dereference will become the base and the offset is assumed to
721 detect_type_change_ssa (tree arg
, tree comp_type
,
722 gimple call
, struct ipa_jump_func
*jfunc
)
724 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
725 if (!flag_devirtualize
726 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
729 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
730 build_int_cst (ptr_type_node
, 0));
732 return detect_type_change (arg
, arg
, comp_type
, call
, jfunc
, 0);
735 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
736 boolean variable pointed to by DATA. */
739 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
742 bool *b
= (bool *) data
;
747 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
748 a value known not to be modified in this function before reaching the
749 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
750 information about the parameter. */
753 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
754 gimple stmt
, tree parm_load
)
756 bool modified
= false;
757 bitmap
*visited_stmts
;
760 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
763 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
764 ao_ref_init (&refd
, parm_load
);
765 /* We can cache visited statements only when parm_ainfo is available and when
766 we are looking at a naked load of the whole parameter. */
767 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
768 visited_stmts
= NULL
;
770 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
771 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
773 if (parm_ainfo
&& modified
)
774 parm_ainfo
->parm_modified
= true;
778 /* If STMT is an assignment that loads a value from an parameter declaration,
779 return the index of the parameter in ipa_node_params which has not been
780 modified. Otherwise return -1. */
783 load_from_unmodified_param (vec
<ipa_param_descriptor
> descriptors
,
784 struct param_analysis_info
*parms_ainfo
,
790 if (!gimple_assign_single_p (stmt
))
793 op1
= gimple_assign_rhs1 (stmt
);
794 if (TREE_CODE (op1
) != PARM_DECL
)
797 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
799 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
806 /* Return true if memory reference REF loads data that are known to be
807 unmodified in this function before reaching statement STMT. PARM_AINFO, if
808 non-NULL, is a pointer to a structure containing temporary information about
812 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
813 gimple stmt
, tree ref
)
815 bool modified
= false;
818 gcc_checking_assert (gimple_vuse (stmt
));
819 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
822 ao_ref_init (&refd
, ref
);
823 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
825 if (parm_ainfo
&& modified
)
826 parm_ainfo
->ref_modified
= true;
830 /* Return true if the data pointed to by PARM is known to be unmodified in this
831 function before reaching call statement CALL into which it is passed.
832 PARM_AINFO is a pointer to a structure containing temporary information
836 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
837 gimple call
, tree parm
)
839 bool modified
= false;
842 /* It's unnecessary to calculate anything about memory contents for a const
843 function because it is not going to use it. But do not cache the result
844 either. Also, no such calculations for non-pointers. */
845 if (!gimple_vuse (call
)
846 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
849 if (parm_ainfo
->pt_modified
)
852 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
853 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
854 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
856 parm_ainfo
->pt_modified
= true;
860 /* Return true if we can prove that OP is a memory reference loading unmodified
861 data from an aggregate passed as a parameter and if the aggregate is passed
862 by reference, that the alias type of the load corresponds to the type of the
863 formal parameter (so that we can rely on this type for TBAA in callers).
864 INFO and PARMS_AINFO describe parameters of the current function (but the
865 latter can be NULL), STMT is the load statement. If function returns true,
866 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
867 within the aggregate and whether it is a load from a value passed by
868 reference respectively. */
871 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor
> descriptors
,
872 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
873 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
874 HOST_WIDE_INT
*size_p
, bool *by_ref_p
)
877 HOST_WIDE_INT size
, max_size
;
878 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
880 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
885 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
887 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
899 if (TREE_CODE (base
) != MEM_REF
900 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
901 || !integer_zerop (TREE_OPERAND (base
, 1)))
904 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
906 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
907 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
911 /* This branch catches situations where a pointer parameter is not a
912 gimple register, for example:
914 void hip7(S*) (struct S * p)
916 void (*<T2e4>) (struct S *) D.1867;
926 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
927 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
931 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
943 /* Just like the previous function, just without the param_analysis_info
944 pointer, for users outside of this file. */
947 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
948 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
951 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
952 offset_p
, NULL
, by_ref_p
);
955 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
956 of an assignment statement STMT, try to determine whether we are actually
957 handling any of the following cases and construct an appropriate jump
958 function into JFUNC if so:
960 1) The passed value is loaded from a formal parameter which is not a gimple
961 register (most probably because it is addressable, the value has to be
962 scalar) and we can guarantee the value has not changed. This case can
963 therefore be described by a simple pass-through jump function. For example:
972 2) The passed value can be described by a simple arithmetic pass-through
979 D.2064_4 = a.1(D) + 4;
982 This case can also occur in combination of the previous one, e.g.:
990 D.2064_4 = a.0_3 + 4;
993 3) The passed value is an address of an object within another one (which
994 also passed by reference). Such situations are described by an ancestor
995 jump function and describe situations such as:
997 B::foo() (struct B * const this)
1001 D.1845_2 = &this_1(D)->D.1748;
1004 INFO is the structure describing individual parameters access different
1005 stages of IPA optimizations. PARMS_AINFO contains the information that is
1006 only needed for intraprocedural analysis. */
1009 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
1010 struct param_analysis_info
*parms_ainfo
,
1011 struct ipa_jump_func
*jfunc
,
1012 gimple call
, gimple stmt
, tree name
,
1015 HOST_WIDE_INT offset
, size
, max_size
;
1016 tree op1
, tc_ssa
, base
, ssa
;
1019 op1
= gimple_assign_rhs1 (stmt
);
1021 if (TREE_CODE (op1
) == SSA_NAME
)
1023 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1024 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1026 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
1027 SSA_NAME_DEF_STMT (op1
));
1032 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
1033 tc_ssa
= gimple_assign_lhs (stmt
);
1038 tree op2
= gimple_assign_rhs2 (stmt
);
1042 if (!is_gimple_ip_invariant (op2
)
1043 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1044 && !useless_type_conversion_p (TREE_TYPE (name
),
1048 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1049 gimple_assign_rhs_code (stmt
));
1051 else if (gimple_assign_single_p (stmt
))
1053 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1055 bool type_p
= false;
1057 if (param_type
&& POINTER_TYPE_P (param_type
))
1058 type_p
= !detect_type_change_ssa (tc_ssa
, TREE_TYPE (param_type
),
1060 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1061 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
1066 if (TREE_CODE (op1
) != ADDR_EXPR
)
1068 op1
= TREE_OPERAND (op1
, 0);
1069 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1071 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1072 if (TREE_CODE (base
) != MEM_REF
1073 /* If this is a varying address, punt. */
1075 || max_size
!= size
)
1077 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1078 ssa
= TREE_OPERAND (base
, 0);
1079 if (TREE_CODE (ssa
) != SSA_NAME
1080 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1084 /* Dynamic types are changed in constructors and destructors. */
1085 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1086 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1088 bool type_p
= !detect_type_change (op1
, base
, TREE_TYPE (param_type
),
1089 call
, jfunc
, offset
);
1090 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1091 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1092 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1093 call
, ssa
), type_p
);
1097 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1100 iftmp.1_3 = &obj_2(D)->D.1762;
1102 The base of the MEM_REF must be a default definition SSA NAME of a
1103 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1104 whole MEM_REF expression is returned and the offset calculated from any
1105 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1106 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1109 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1111 HOST_WIDE_INT size
, max_size
;
1112 tree expr
, parm
, obj
;
1114 if (!gimple_assign_single_p (assign
))
1116 expr
= gimple_assign_rhs1 (assign
);
1118 if (TREE_CODE (expr
) != ADDR_EXPR
)
1120 expr
= TREE_OPERAND (expr
, 0);
1122 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1124 if (TREE_CODE (expr
) != MEM_REF
1125 /* If this is a varying address, punt. */
1130 parm
= TREE_OPERAND (expr
, 0);
1131 if (TREE_CODE (parm
) != SSA_NAME
1132 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1133 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1136 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1142 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1143 statement PHI, try to find out whether NAME is in fact a
1144 multiple-inheritance typecast from a descendant into an ancestor of a formal
1145 parameter and thus can be described by an ancestor jump function and if so,
1146 write the appropriate function into JFUNC.
1148 Essentially we want to match the following pattern:
1156 iftmp.1_3 = &obj_2(D)->D.1762;
1159 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1160 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1164 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1165 struct param_analysis_info
*parms_ainfo
,
1166 struct ipa_jump_func
*jfunc
,
1167 gimple call
, gimple phi
, tree param_type
)
1169 HOST_WIDE_INT offset
;
1170 gimple assign
, cond
;
1171 basic_block phi_bb
, assign_bb
, cond_bb
;
1172 tree tmp
, parm
, expr
, obj
;
1175 if (gimple_phi_num_args (phi
) != 2)
1178 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1179 tmp
= PHI_ARG_DEF (phi
, 0);
1180 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1181 tmp
= PHI_ARG_DEF (phi
, 1);
1184 if (TREE_CODE (tmp
) != SSA_NAME
1185 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1186 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1187 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1190 assign
= SSA_NAME_DEF_STMT (tmp
);
1191 assign_bb
= gimple_bb (assign
);
1192 if (!single_pred_p (assign_bb
))
1194 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1197 parm
= TREE_OPERAND (expr
, 0);
1198 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1199 gcc_assert (index
>= 0);
1201 cond_bb
= single_pred (assign_bb
);
1202 cond
= last_stmt (cond_bb
);
1204 || gimple_code (cond
) != GIMPLE_COND
1205 || gimple_cond_code (cond
) != NE_EXPR
1206 || gimple_cond_lhs (cond
) != parm
1207 || !integer_zerop (gimple_cond_rhs (cond
)))
1210 phi_bb
= gimple_bb (phi
);
1211 for (i
= 0; i
< 2; i
++)
1213 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1214 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1218 bool type_p
= false;
1219 if (param_type
&& POINTER_TYPE_P (param_type
))
1220 type_p
= !detect_type_change (obj
, expr
, TREE_TYPE (param_type
),
1221 call
, jfunc
, offset
);
1222 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1223 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1224 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1225 call
, parm
), type_p
);
1228 /* Given OP which is passed as an actual argument to a called function,
1229 determine if it is possible to construct a KNOWN_TYPE jump function for it
1230 and if so, create one and store it to JFUNC.
1231 EXPECTED_TYPE represents a type the argument should be in */
1234 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1235 gimple call
, tree expected_type
)
1237 HOST_WIDE_INT offset
, size
, max_size
;
1240 if (!flag_devirtualize
1241 || TREE_CODE (op
) != ADDR_EXPR
1242 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
1243 /* Be sure expected_type is polymorphic. */
1245 || TREE_CODE (expected_type
) != RECORD_TYPE
1246 || !TYPE_BINFO (expected_type
)
1247 || !BINFO_VTABLE (TYPE_BINFO (expected_type
)))
1250 op
= TREE_OPERAND (op
, 0);
1251 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1255 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1256 || is_global_var (base
))
1259 if (detect_type_change (op
, base
, expected_type
, call
, jfunc
, offset
))
1262 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
),
1266 /* Inspect the given TYPE and return true iff it has the same structure (the
1267 same number of fields of the same types) as a C++ member pointer. If
1268 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1269 corresponding fields there. */
1272 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1276 if (TREE_CODE (type
) != RECORD_TYPE
)
1279 fld
= TYPE_FIELDS (type
);
1280 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1281 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1282 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1288 fld
= DECL_CHAIN (fld
);
1289 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1290 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1295 if (DECL_CHAIN (fld
))
1301 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1302 return the rhs of its defining statement. Otherwise return RHS as it
1306 get_ssa_def_if_simple_copy (tree rhs
)
1308 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1310 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1312 if (gimple_assign_single_p (def_stmt
))
1313 rhs
= gimple_assign_rhs1 (def_stmt
);
1320 /* Simple linked list, describing known contents of an aggregate beforere
1323 struct ipa_known_agg_contents_list
1325 /* Offset and size of the described part of the aggregate. */
1326 HOST_WIDE_INT offset
, size
;
1327 /* Known constant value or NULL if the contents is known to be unknown. */
1329 /* Pointer to the next structure in the list. */
1330 struct ipa_known_agg_contents_list
*next
;
1333 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1334 in ARG is filled in with constant values. ARG can either be an aggregate
1335 expression or a pointer to an aggregate. JFUNC is the jump function into
1336 which the constants are subsequently stored. */
1339 determine_known_aggregate_parts (gimple call
, tree arg
,
1340 struct ipa_jump_func
*jfunc
)
1342 struct ipa_known_agg_contents_list
*list
= NULL
;
1343 int item_count
= 0, const_count
= 0;
1344 HOST_WIDE_INT arg_offset
, arg_size
;
1345 gimple_stmt_iterator gsi
;
1347 bool check_ref
, by_ref
;
1350 /* The function operates in three stages. First, we prepare check_ref, r,
1351 arg_base and arg_offset based on what is actually passed as an actual
1354 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1357 if (TREE_CODE (arg
) == SSA_NAME
)
1360 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)))))
1365 type_size
= TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)));
1366 arg_size
= tree_to_uhwi (type_size
);
1367 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1369 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1371 HOST_WIDE_INT arg_max_size
;
1373 arg
= TREE_OPERAND (arg
, 0);
1374 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1376 if (arg_max_size
== -1
1377 || arg_max_size
!= arg_size
1380 if (DECL_P (arg_base
))
1384 size
= build_int_cst (integer_type_node
, arg_size
);
1385 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
1395 HOST_WIDE_INT arg_max_size
;
1397 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1401 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1403 if (arg_max_size
== -1
1404 || arg_max_size
!= arg_size
1408 ao_ref_init (&r
, arg
);
1411 /* Second stage walks back the BB, looks at individual statements and as long
1412 as it is confident of how the statements affect contents of the
1413 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1415 gsi
= gsi_for_stmt (call
);
1417 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1419 struct ipa_known_agg_contents_list
*n
, **p
;
1420 gimple stmt
= gsi_stmt (gsi
);
1421 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1422 tree lhs
, rhs
, lhs_base
;
1423 bool partial_overlap
;
1425 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1427 if (!gimple_assign_single_p (stmt
))
1430 lhs
= gimple_assign_lhs (stmt
);
1431 rhs
= gimple_assign_rhs1 (stmt
);
1432 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1433 || TREE_CODE (lhs
) == BIT_FIELD_REF
1434 || contains_bitfld_component_ref_p (lhs
))
1437 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
1439 if (lhs_max_size
== -1
1440 || lhs_max_size
!= lhs_size
1441 || (lhs_offset
< arg_offset
1442 && lhs_offset
+ lhs_size
> arg_offset
)
1443 || (lhs_offset
< arg_offset
+ arg_size
1444 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
1449 if (TREE_CODE (lhs_base
) != MEM_REF
1450 || TREE_OPERAND (lhs_base
, 0) != arg_base
1451 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1454 else if (lhs_base
!= arg_base
)
1456 if (DECL_P (lhs_base
))
1462 if (lhs_offset
+ lhs_size
< arg_offset
1463 || lhs_offset
>= (arg_offset
+ arg_size
))
1466 partial_overlap
= false;
1468 while (*p
&& (*p
)->offset
< lhs_offset
)
1470 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1472 partial_overlap
= true;
1477 if (partial_overlap
)
1479 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1481 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1482 /* We already know this value is subsequently overwritten with
1486 /* Otherwise this is a partial overlap which we cannot
1491 rhs
= get_ssa_def_if_simple_copy (rhs
);
1492 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1494 n
->offset
= lhs_offset
;
1495 if (is_gimple_ip_invariant (rhs
))
1501 n
->constant
= NULL_TREE
;
1506 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1507 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1511 /* Third stage just goes over the list and creates an appropriate vector of
1512 ipa_agg_jf_item structures out of it, of sourse only if there are
1513 any known constants to begin with. */
1517 jfunc
->agg
.by_ref
= by_ref
;
1518 vec_alloc (jfunc
->agg
.items
, const_count
);
1523 struct ipa_agg_jf_item item
;
1524 item
.offset
= list
->offset
- arg_offset
;
1525 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1526 item
.value
= unshare_expr_without_location (list
->constant
);
1527 jfunc
->agg
.items
->quick_push (item
);
1535 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1538 tree type
= (e
->callee
1539 ? TREE_TYPE (e
->callee
->decl
)
1540 : gimple_call_fntype (e
->call_stmt
));
1541 tree t
= TYPE_ARG_TYPES (type
);
1543 for (n
= 0; n
< i
; n
++)
1550 return TREE_VALUE (t
);
1553 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1554 for (n
= 0; n
< i
; n
++)
1561 return TREE_TYPE (t
);
1565 /* Compute jump function for all arguments of callsite CS and insert the
1566 information in the jump_functions array in the ipa_edge_args corresponding
1567 to this callsite. */
1570 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1571 struct cgraph_edge
*cs
)
1573 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1574 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1575 gimple call
= cs
->call_stmt
;
1576 int n
, arg_num
= gimple_call_num_args (call
);
1578 if (arg_num
== 0 || args
->jump_functions
)
1580 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1582 if (gimple_call_internal_p (call
))
1584 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1587 for (n
= 0; n
< arg_num
; n
++)
1589 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1590 tree arg
= gimple_call_arg (call
, n
);
1591 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1593 if (is_gimple_ip_invariant (arg
))
1594 ipa_set_jf_constant (jfunc
, arg
, cs
);
1595 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1596 && TREE_CODE (arg
) == PARM_DECL
)
1598 int index
= ipa_get_param_decl_index (info
, arg
);
1600 gcc_assert (index
>=0);
1601 /* Aggregate passed by value, check for pass-through, otherwise we
1602 will attempt to fill in aggregate contents later in this
1604 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1606 ipa_set_jf_simple_pass_through (jfunc
, index
, false, false);
1610 else if (TREE_CODE (arg
) == SSA_NAME
)
1612 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1614 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1618 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1620 if (param_type
&& POINTER_TYPE_P (param_type
))
1621 type_p
= !detect_type_change_ssa (arg
, TREE_TYPE (param_type
),
1625 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1626 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
,
1632 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1633 if (is_gimple_assign (stmt
))
1634 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1635 call
, stmt
, arg
, param_type
);
1636 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1637 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1638 call
, stmt
, param_type
);
1642 compute_known_type_jump_func (arg
, jfunc
, call
,
1644 && POINTER_TYPE_P (param_type
)
1645 ? TREE_TYPE (param_type
)
1648 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1649 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1650 && (jfunc
->type
!= IPA_JF_ANCESTOR
1651 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1652 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1653 || (POINTER_TYPE_P (TREE_TYPE (arg
)))))
1654 determine_known_aggregate_parts (call
, arg
, jfunc
);
1658 /* Compute jump functions for all edges - both direct and indirect - outgoing
1659 from NODE. Also count the actual arguments in the process. */
1662 ipa_compute_jump_functions (struct cgraph_node
*node
,
1663 struct param_analysis_info
*parms_ainfo
)
1665 struct cgraph_edge
*cs
;
1667 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1669 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1671 /* We do not need to bother analyzing calls to unknown
1672 functions unless they may become known during lto/whopr. */
1673 if (!callee
->definition
&& !flag_lto
)
1675 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1678 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1679 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1682 /* If STMT looks like a statement loading a value from a member pointer formal
1683 parameter, return that parameter and store the offset of the field to
1684 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1685 might be clobbered). If USE_DELTA, then we look for a use of the delta
1686 field rather than the pfn. */
1689 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1690 HOST_WIDE_INT
*offset_p
)
1692 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1694 if (!gimple_assign_single_p (stmt
))
1697 rhs
= gimple_assign_rhs1 (stmt
);
1698 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1700 ref_field
= TREE_OPERAND (rhs
, 1);
1701 rhs
= TREE_OPERAND (rhs
, 0);
1704 ref_field
= NULL_TREE
;
1705 if (TREE_CODE (rhs
) != MEM_REF
)
1707 rec
= TREE_OPERAND (rhs
, 0);
1708 if (TREE_CODE (rec
) != ADDR_EXPR
)
1710 rec
= TREE_OPERAND (rec
, 0);
1711 if (TREE_CODE (rec
) != PARM_DECL
1712 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1714 ref_offset
= TREE_OPERAND (rhs
, 1);
1721 *offset_p
= int_bit_position (fld
);
1725 if (integer_nonzerop (ref_offset
))
1727 return ref_field
== fld
? rec
: NULL_TREE
;
1730 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1734 /* Returns true iff T is an SSA_NAME defined by a statement. */
1737 ipa_is_ssa_with_stmt_def (tree t
)
1739 if (TREE_CODE (t
) == SSA_NAME
1740 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1746 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1747 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1748 indirect call graph edge. */
1750 static struct cgraph_edge
*
1751 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1753 struct cgraph_edge
*cs
;
1755 cs
= cgraph_edge (node
, stmt
);
1756 cs
->indirect_info
->param_index
= param_index
;
1757 cs
->indirect_info
->agg_contents
= 0;
1758 cs
->indirect_info
->member_ptr
= 0;
1762 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1763 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1764 intermediate information about each formal parameter. Currently it checks
1765 whether the call calls a pointer that is a formal parameter and if so, the
1766 parameter is marked with the called flag and an indirect call graph edge
1767 describing the call is created. This is very simple for ordinary pointers
1768 represented in SSA but not-so-nice when it comes to member pointers. The
1769 ugly part of this function does nothing more than trying to match the
1770 pattern of such a call. An example of such a pattern is the gimple dump
1771 below, the call is on the last line:
1774 f$__delta_5 = f.__delta;
1775 f$__pfn_24 = f.__pfn;
1779 f$__delta_5 = MEM[(struct *)&f];
1780 f$__pfn_24 = MEM[(struct *)&f + 4B];
1782 and a few lines below:
1785 D.2496_3 = (int) f$__pfn_24;
1786 D.2497_4 = D.2496_3 & 1;
1793 D.2500_7 = (unsigned int) f$__delta_5;
1794 D.2501_8 = &S + D.2500_7;
1795 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1796 D.2503_10 = *D.2502_9;
1797 D.2504_12 = f$__pfn_24 + -1;
1798 D.2505_13 = (unsigned int) D.2504_12;
1799 D.2506_14 = D.2503_10 + D.2505_13;
1800 D.2507_15 = *D.2506_14;
1801 iftmp.11_16 = (String:: *) D.2507_15;
1804 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1805 D.2500_19 = (unsigned int) f$__delta_5;
1806 D.2508_20 = &S + D.2500_19;
1807 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1809 Such patterns are results of simple calls to a member pointer:
1811 int doprinting (int (MyString::* f)(int) const)
1813 MyString S ("somestring");
1818 Moreover, the function also looks for called pointers loaded from aggregates
1819 passed by value or reference. */
1822 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1823 struct ipa_node_params
*info
,
1824 struct param_analysis_info
*parms_ainfo
,
1825 gimple call
, tree target
)
1830 tree rec
, rec2
, cond
;
1833 basic_block bb
, virt_bb
, join
;
1834 HOST_WIDE_INT offset
;
1837 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1839 tree var
= SSA_NAME_VAR (target
);
1840 index
= ipa_get_param_decl_index (info
, var
);
1842 ipa_note_param_call (node
, index
, call
);
1846 def
= SSA_NAME_DEF_STMT (target
);
1847 if (gimple_assign_single_p (def
)
1848 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1849 gimple_assign_rhs1 (def
), &index
, &offset
,
1852 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1853 if (cs
->indirect_info
->offset
!= offset
)
1854 cs
->indirect_info
->outer_type
= NULL
;
1855 cs
->indirect_info
->offset
= offset
;
1856 cs
->indirect_info
->agg_contents
= 1;
1857 cs
->indirect_info
->by_ref
= by_ref
;
1861 /* Now we need to try to match the complex pattern of calling a member
1863 if (gimple_code (def
) != GIMPLE_PHI
1864 || gimple_phi_num_args (def
) != 2
1865 || !POINTER_TYPE_P (TREE_TYPE (target
))
1866 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1869 /* First, we need to check whether one of these is a load from a member
1870 pointer that is a parameter to this function. */
1871 n1
= PHI_ARG_DEF (def
, 0);
1872 n2
= PHI_ARG_DEF (def
, 1);
1873 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1875 d1
= SSA_NAME_DEF_STMT (n1
);
1876 d2
= SSA_NAME_DEF_STMT (n2
);
1878 join
= gimple_bb (def
);
1879 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1881 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1884 bb
= EDGE_PRED (join
, 0)->src
;
1885 virt_bb
= gimple_bb (d2
);
1887 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1889 bb
= EDGE_PRED (join
, 1)->src
;
1890 virt_bb
= gimple_bb (d1
);
1895 /* Second, we need to check that the basic blocks are laid out in the way
1896 corresponding to the pattern. */
1898 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1899 || single_pred (virt_bb
) != bb
1900 || single_succ (virt_bb
) != join
)
1903 /* Third, let's see that the branching is done depending on the least
1904 significant bit of the pfn. */
1906 branch
= last_stmt (bb
);
1907 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1910 if ((gimple_cond_code (branch
) != NE_EXPR
1911 && gimple_cond_code (branch
) != EQ_EXPR
)
1912 || !integer_zerop (gimple_cond_rhs (branch
)))
1915 cond
= gimple_cond_lhs (branch
);
1916 if (!ipa_is_ssa_with_stmt_def (cond
))
1919 def
= SSA_NAME_DEF_STMT (cond
);
1920 if (!is_gimple_assign (def
)
1921 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1922 || !integer_onep (gimple_assign_rhs2 (def
)))
1925 cond
= gimple_assign_rhs1 (def
);
1926 if (!ipa_is_ssa_with_stmt_def (cond
))
1929 def
= SSA_NAME_DEF_STMT (cond
);
1931 if (is_gimple_assign (def
)
1932 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1934 cond
= gimple_assign_rhs1 (def
);
1935 if (!ipa_is_ssa_with_stmt_def (cond
))
1937 def
= SSA_NAME_DEF_STMT (cond
);
1940 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1941 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1942 == ptrmemfunc_vbit_in_delta
),
1947 index
= ipa_get_param_decl_index (info
, rec
);
1949 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1951 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1952 if (cs
->indirect_info
->offset
!= offset
)
1953 cs
->indirect_info
->outer_type
= NULL
;
1954 cs
->indirect_info
->offset
= offset
;
1955 cs
->indirect_info
->agg_contents
= 1;
1956 cs
->indirect_info
->member_ptr
= 1;
1962 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1963 object referenced in the expression is a formal parameter of the caller
1964 (described by INFO), create a call note for the statement. */
1967 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1968 struct ipa_node_params
*info
, gimple call
,
1971 struct cgraph_edge
*cs
;
1972 struct cgraph_indirect_call_info
*ii
;
1973 struct ipa_jump_func jfunc
;
1974 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1976 HOST_WIDE_INT anc_offset
;
1978 if (!flag_devirtualize
)
1981 if (TREE_CODE (obj
) != SSA_NAME
)
1984 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1986 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1990 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1991 gcc_assert (index
>= 0);
1992 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
1998 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
2001 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2004 index
= ipa_get_param_decl_index (info
,
2005 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2006 gcc_assert (index
>= 0);
2007 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2008 call
, &jfunc
, anc_offset
))
2012 cs
= ipa_note_param_call (node
, index
, call
);
2013 ii
= cs
->indirect_info
;
2014 ii
->offset
= anc_offset
;
2015 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2016 ii
->otr_type
= obj_type_ref_class (target
);
2017 ii
->polymorphic
= 1;
2020 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2021 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2022 containing intermediate information about each formal parameter. */
2025 ipa_analyze_call_uses (struct cgraph_node
*node
,
2026 struct ipa_node_params
*info
,
2027 struct param_analysis_info
*parms_ainfo
, gimple call
)
2029 tree target
= gimple_call_fn (call
);
2030 struct cgraph_edge
*cs
;
2033 || (TREE_CODE (target
) != SSA_NAME
2034 && !virtual_method_call_p (target
)))
2037 /* If we previously turned the call into a direct call, there is
2038 no need to analyze. */
2039 cs
= cgraph_edge (node
, call
);
2040 if (cs
&& !cs
->indirect_unknown_callee
)
2042 if (TREE_CODE (target
) == SSA_NAME
)
2043 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
2044 else if (virtual_method_call_p (target
))
2045 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
2049 /* Analyze the call statement STMT with respect to formal parameters (described
2050 in INFO) of caller given by NODE. Currently it only checks whether formal
2051 parameters are called. PARMS_AINFO is a pointer to a vector containing
2052 intermediate information about each formal parameter. */
2055 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
2056 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
2058 if (is_gimple_call (stmt
))
2059 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
2062 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2063 If OP is a parameter declaration, mark it as used in the info structure
2067 visit_ref_for_mod_analysis (gimple
, tree op
, tree
, void *data
)
2069 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
2071 op
= get_base_address (op
);
2073 && TREE_CODE (op
) == PARM_DECL
)
2075 int index
= ipa_get_param_decl_index (info
, op
);
2076 gcc_assert (index
>= 0);
2077 ipa_set_param_used (info
, index
, true);
2083 /* Scan the function body of NODE and inspect the uses of formal parameters.
2084 Store the findings in various structures of the associated ipa_node_params
2085 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2086 vector containing intermediate information about each formal parameter. */
2089 ipa_analyze_params_uses (struct cgraph_node
*node
,
2090 struct param_analysis_info
*parms_ainfo
)
2092 tree decl
= node
->decl
;
2094 struct function
*func
;
2095 gimple_stmt_iterator gsi
;
2096 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2099 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
2102 info
->uses_analysis_done
= 1;
2103 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2105 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2107 ipa_set_param_used (info
, i
, true);
2108 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2113 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2115 tree parm
= ipa_get_param (info
, i
);
2116 int controlled_uses
= 0;
2118 /* For SSA regs see if parameter is used. For non-SSA we compute
2119 the flag during modification analysis. */
2120 if (is_gimple_reg (parm
))
2122 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2124 if (ddef
&& !has_zero_uses (ddef
))
2126 imm_use_iterator imm_iter
;
2127 use_operand_p use_p
;
2129 ipa_set_param_used (info
, i
, true);
2130 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2131 if (!is_gimple_call (USE_STMT (use_p
)))
2133 if (!is_gimple_debug (USE_STMT (use_p
)))
2135 controlled_uses
= IPA_UNDESCRIBED_USE
;
2143 controlled_uses
= 0;
2146 controlled_uses
= IPA_UNDESCRIBED_USE
;
2147 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2150 func
= DECL_STRUCT_FUNCTION (decl
);
2151 FOR_EACH_BB_FN (bb
, func
)
2153 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2155 gimple stmt
= gsi_stmt (gsi
);
2157 if (is_gimple_debug (stmt
))
2160 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2161 walk_stmt_load_store_addr_ops (stmt
, info
,
2162 visit_ref_for_mod_analysis
,
2163 visit_ref_for_mod_analysis
,
2164 visit_ref_for_mod_analysis
);
2166 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2167 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2168 visit_ref_for_mod_analysis
,
2169 visit_ref_for_mod_analysis
,
2170 visit_ref_for_mod_analysis
);
2174 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2177 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
2181 for (i
= 0; i
< param_count
; i
++)
2183 if (parms_ainfo
[i
].parm_visited_statements
)
2184 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2185 if (parms_ainfo
[i
].pt_visited_statements
)
2186 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2190 /* Initialize the array describing properties of of formal parameters
2191 of NODE, analyze their uses and compute jump functions associated
2192 with actual arguments of calls from within NODE. */
2195 ipa_analyze_node (struct cgraph_node
*node
)
2197 struct ipa_node_params
*info
;
2198 struct param_analysis_info
*parms_ainfo
;
2201 ipa_check_create_node_params ();
2202 ipa_check_create_edge_args ();
2203 info
= IPA_NODE_REF (node
);
2204 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2205 ipa_initialize_node_params (node
);
2207 param_count
= ipa_get_param_count (info
);
2208 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2209 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2211 ipa_analyze_params_uses (node
, parms_ainfo
);
2212 ipa_compute_jump_functions (node
, parms_ainfo
);
2214 free_parms_ainfo (parms_ainfo
, param_count
);
2218 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2219 attempt a type-based devirtualization. If successful, return the
2220 target function declaration, otherwise return NULL. */
2223 ipa_intraprocedural_devirtualization (gimple call
)
2225 tree binfo
, token
, fndecl
;
2226 struct ipa_jump_func jfunc
;
2227 tree otr
= gimple_call_fn (call
);
2229 jfunc
.type
= IPA_JF_UNKNOWN
;
2230 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2231 call
, obj_type_ref_class (otr
));
2232 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2234 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
2237 token
= OBJ_TYPE_REF_TOKEN (otr
);
2238 fndecl
= gimple_get_virt_method_for_binfo (tree_to_uhwi (token
),
2240 #ifdef ENABLE_CHECKING
2242 gcc_assert (possible_polymorphic_call_target_p
2243 (otr
, cgraph_get_node (fndecl
)));
2248 /* Update the jump function DST when the call graph edge corresponding to SRC is
2249 is being inlined, knowing that DST is of type ancestor and src of known
2253 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2254 struct ipa_jump_func
*dst
)
2256 HOST_WIDE_INT combined_offset
;
2259 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2261 dst
->type
= IPA_JF_UNKNOWN
;
2265 combined_offset
= ipa_get_jf_known_type_offset (src
)
2266 + ipa_get_jf_ancestor_offset (dst
);
2267 combined_type
= ipa_get_jf_ancestor_type (dst
);
2269 ipa_set_jf_known_type (dst
, combined_offset
,
2270 ipa_get_jf_known_type_base_type (src
),
2274 /* Update the jump functions associated with call graph edge E when the call
2275 graph edge CS is being inlined, assuming that E->caller is already (possibly
2276 indirectly) inlined into CS->callee and that E has not been inlined. */
2279 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2280 struct cgraph_edge
*e
)
2282 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2283 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2284 int count
= ipa_get_cs_argument_count (args
);
2287 for (i
= 0; i
< count
; i
++)
2289 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2291 if (dst
->type
== IPA_JF_ANCESTOR
)
2293 struct ipa_jump_func
*src
;
2294 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2296 /* Variable number of arguments can cause havoc if we try to access
2297 one that does not exist in the inlined edge. So make sure we
2299 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2301 dst
->type
= IPA_JF_UNKNOWN
;
2305 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2308 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2310 struct ipa_agg_jf_item
*item
;
2313 /* Currently we do not produce clobber aggregate jump functions,
2314 replace with merging when we do. */
2315 gcc_assert (!dst
->agg
.items
);
2317 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2318 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2319 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2320 item
->offset
-= dst
->value
.ancestor
.offset
;
2323 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2324 combine_known_type_and_ancestor_jfs (src
, dst
);
2325 else if (src
->type
== IPA_JF_PASS_THROUGH
2326 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2328 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2329 dst
->value
.ancestor
.agg_preserved
&=
2330 src
->value
.pass_through
.agg_preserved
;
2331 dst
->value
.ancestor
.type_preserved
&=
2332 src
->value
.pass_through
.type_preserved
;
2334 else if (src
->type
== IPA_JF_ANCESTOR
)
2336 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2337 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2338 dst
->value
.ancestor
.agg_preserved
&=
2339 src
->value
.ancestor
.agg_preserved
;
2340 dst
->value
.ancestor
.type_preserved
&=
2341 src
->value
.ancestor
.type_preserved
;
2344 dst
->type
= IPA_JF_UNKNOWN
;
2346 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2348 struct ipa_jump_func
*src
;
2349 /* We must check range due to calls with variable number of arguments
2350 and we cannot combine jump functions with operations. */
2351 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2352 && (dst
->value
.pass_through
.formal_id
2353 < ipa_get_cs_argument_count (top
)))
2355 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2356 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2357 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2361 case IPA_JF_UNKNOWN
:
2362 dst
->type
= IPA_JF_UNKNOWN
;
2364 case IPA_JF_KNOWN_TYPE
:
2365 if (ipa_get_jf_pass_through_type_preserved (dst
))
2366 ipa_set_jf_known_type (dst
,
2367 ipa_get_jf_known_type_offset (src
),
2368 ipa_get_jf_known_type_base_type (src
),
2369 ipa_get_jf_known_type_base_type (src
));
2371 dst
->type
= IPA_JF_UNKNOWN
;
2374 ipa_set_jf_cst_copy (dst
, src
);
2377 case IPA_JF_PASS_THROUGH
:
2379 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2380 enum tree_code operation
;
2381 operation
= ipa_get_jf_pass_through_operation (src
);
2383 if (operation
== NOP_EXPR
)
2387 && ipa_get_jf_pass_through_agg_preserved (src
);
2388 type_p
= ipa_get_jf_pass_through_type_preserved (src
)
2389 && ipa_get_jf_pass_through_type_preserved (dst
);
2390 ipa_set_jf_simple_pass_through (dst
, formal_id
,
2395 tree operand
= ipa_get_jf_pass_through_operand (src
);
2396 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2401 case IPA_JF_ANCESTOR
:
2405 && ipa_get_jf_ancestor_agg_preserved (src
);
2406 type_p
= ipa_get_jf_ancestor_type_preserved (src
)
2407 && ipa_get_jf_pass_through_type_preserved (dst
);
2408 ipa_set_ancestor_jf (dst
,
2409 ipa_get_jf_ancestor_offset (src
),
2410 ipa_get_jf_ancestor_type (src
),
2411 ipa_get_jf_ancestor_formal_id (src
),
2420 && (dst_agg_p
|| !src
->agg
.by_ref
))
2422 /* Currently we do not produce clobber aggregate jump
2423 functions, replace with merging when we do. */
2424 gcc_assert (!dst
->agg
.items
);
2426 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2427 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2431 dst
->type
= IPA_JF_UNKNOWN
;
2436 /* If TARGET is an addr_expr of a function declaration, make it the destination
2437 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2439 struct cgraph_edge
*
2440 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2442 struct cgraph_node
*callee
;
2443 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2444 bool unreachable
= false;
2446 if (TREE_CODE (target
) == ADDR_EXPR
)
2447 target
= TREE_OPERAND (target
, 0);
2448 if (TREE_CODE (target
) != FUNCTION_DECL
)
2450 target
= canonicalize_constructor_val (target
, NULL
);
2451 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2453 if (ie
->indirect_info
->member_ptr
)
2454 /* Member pointer call that goes through a VMT lookup. */
2458 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2459 " in %s/%i, making it unreachable.\n",
2460 ie
->caller
->name (), ie
->caller
->order
);
2461 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2462 callee
= cgraph_get_create_node (target
);
2466 callee
= cgraph_get_node (target
);
2469 callee
= cgraph_get_node (target
);
2471 /* Because may-edges are not explicitely represented and vtable may be external,
2472 we may create the first reference to the object in the unit. */
2473 if (!callee
|| callee
->global
.inlined_to
)
2476 /* We are better to ensure we can refer to it.
2477 In the case of static functions we are out of luck, since we already
2478 removed its body. In the case of public functions we may or may
2479 not introduce the reference. */
2480 if (!canonicalize_constructor_val (target
, NULL
)
2481 || !TREE_PUBLIC (target
))
2484 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2485 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2486 xstrdup (ie
->caller
->name ()),
2488 xstrdup (ie
->callee
->name ()),
2492 callee
= cgraph_get_create_node (target
);
2494 ipa_check_create_node_params ();
2496 /* We can not make edges to inline clones. It is bug that someone removed
2497 the cgraph node too early. */
2498 gcc_assert (!callee
->global
.inlined_to
);
2500 if (dump_file
&& !unreachable
)
2502 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2503 "(%s/%i -> %s/%i), for stmt ",
2504 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2505 xstrdup (ie
->caller
->name ()),
2507 xstrdup (callee
->name ()),
2510 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2512 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2514 ie
= cgraph_make_edge_direct (ie
, callee
);
2515 es
= inline_edge_summary (ie
);
2516 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2517 - eni_size_weights
.call_cost
);
2518 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2519 - eni_time_weights
.call_cost
);
2524 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2525 return NULL if there is not any. BY_REF specifies whether the value has to
2526 be passed by reference or by value. */
2529 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2530 HOST_WIDE_INT offset
, bool by_ref
)
2532 struct ipa_agg_jf_item
*item
;
2535 if (by_ref
!= agg
->by_ref
)
2538 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2539 if (item
->offset
== offset
)
2541 /* Currently we do not have clobber values, return NULL for them once
2543 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2549 /* Remove a reference to SYMBOL from the list of references of a node given by
2550 reference description RDESC. Return true if the reference has been
2551 successfully found and removed. */
2554 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2556 struct ipa_ref
*to_del
;
2557 struct cgraph_edge
*origin
;
2562 to_del
= ipa_find_reference (origin
->caller
, symbol
,
2563 origin
->call_stmt
, origin
->lto_stmt_uid
);
2567 ipa_remove_reference (to_del
);
2569 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2570 xstrdup (origin
->caller
->name ()),
2571 origin
->caller
->order
, xstrdup (symbol
->name ()));
2575 /* If JFUNC has a reference description with refcount different from
2576 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2577 NULL. JFUNC must be a constant jump function. */
2579 static struct ipa_cst_ref_desc
*
2580 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2582 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2583 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2589 /* If the value of constant jump function JFUNC is an address of a function
2590 declaration, return the associated call graph node. Otherwise return
2593 static cgraph_node
*
2594 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2596 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2597 tree cst
= ipa_get_jf_constant (jfunc
);
2598 if (TREE_CODE (cst
) != ADDR_EXPR
2599 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2602 return cgraph_get_node (TREE_OPERAND (cst
, 0));
2606 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2607 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2608 the edge specified in the rdesc. Return false if either the symbol or the
2609 reference could not be found, otherwise return true. */
2612 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2614 struct ipa_cst_ref_desc
*rdesc
;
2615 if (jfunc
->type
== IPA_JF_CONST
2616 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2617 && --rdesc
->refcount
== 0)
2619 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2623 return remove_described_reference (symbol
, rdesc
);
2628 /* Try to find a destination for indirect edge IE that corresponds to a simple
2629 call or a call of a member function pointer and where the destination is a
2630 pointer formal parameter described by jump function JFUNC. If it can be
2631 determined, return the newly direct edge, otherwise return NULL.
2632 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2634 static struct cgraph_edge
*
2635 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2636 struct ipa_jump_func
*jfunc
,
2637 struct ipa_node_params
*new_root_info
)
2639 struct cgraph_edge
*cs
;
2641 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2643 if (ie
->indirect_info
->agg_contents
)
2644 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2645 ie
->indirect_info
->offset
,
2646 ie
->indirect_info
->by_ref
);
2648 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2651 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2653 if (cs
&& !agg_contents
)
2656 gcc_checking_assert (cs
->callee
2658 || jfunc
->type
!= IPA_JF_CONST
2659 || !cgraph_node_for_jfunc (jfunc
)
2660 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2661 ok
= try_decrement_rdesc_refcount (jfunc
);
2662 gcc_checking_assert (ok
);
2668 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2669 call based on a formal parameter which is described by jump function JFUNC
2670 and if it can be determined, make it direct and return the direct edge.
2671 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2674 static struct cgraph_edge
*
2675 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2676 struct ipa_jump_func
*jfunc
,
2677 struct ipa_node_params
*new_root_info
)
2681 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2686 if (TREE_CODE (binfo
) != TREE_BINFO
)
2688 binfo
= gimple_extract_devirt_binfo_from_cst
2689 (binfo
, ie
->indirect_info
->otr_type
);
2694 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2695 ie
->indirect_info
->otr_type
);
2697 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2704 #ifdef ENABLE_CHECKING
2705 gcc_assert (possible_polymorphic_call_target_p
2706 (ie
, cgraph_get_node (target
)));
2708 return ipa_make_edge_direct_to_target (ie
, target
);
2714 /* Update the param called notes associated with NODE when CS is being inlined,
2715 assuming NODE is (potentially indirectly) inlined into CS->callee.
2716 Moreover, if the callee is discovered to be constant, create a new cgraph
2717 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2718 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2721 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2722 struct cgraph_node
*node
,
2723 vec
<cgraph_edge_p
> *new_edges
)
2725 struct ipa_edge_args
*top
;
2726 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2727 struct ipa_node_params
*new_root_info
;
2730 ipa_check_create_edge_args ();
2731 top
= IPA_EDGE_REF (cs
);
2732 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2733 ? cs
->caller
->global
.inlined_to
2736 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2738 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2739 struct ipa_jump_func
*jfunc
;
2742 next_ie
= ie
->next_callee
;
2744 if (ici
->param_index
== -1)
2747 /* We must check range due to calls with variable number of arguments: */
2748 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2750 ici
->param_index
= -1;
2754 param_index
= ici
->param_index
;
2755 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2757 if (!flag_indirect_inlining
)
2758 new_direct_edge
= NULL
;
2759 else if (ici
->polymorphic
)
2760 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2763 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2765 /* If speculation was removed, then we need to do nothing. */
2766 if (new_direct_edge
&& new_direct_edge
!= ie
)
2768 new_direct_edge
->indirect_inlining_edge
= 1;
2769 top
= IPA_EDGE_REF (cs
);
2772 else if (new_direct_edge
)
2774 new_direct_edge
->indirect_inlining_edge
= 1;
2775 if (new_direct_edge
->call_stmt
)
2776 new_direct_edge
->call_stmt_cannot_inline_p
2777 = !gimple_check_call_matching_types (
2778 new_direct_edge
->call_stmt
,
2779 new_direct_edge
->callee
->decl
, false);
2782 new_edges
->safe_push (new_direct_edge
);
2785 top
= IPA_EDGE_REF (cs
);
2787 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2788 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2790 if (ici
->agg_contents
2791 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2792 ici
->param_index
= -1;
2794 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2796 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2798 if (ici
->agg_contents
2799 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2800 ici
->param_index
= -1;
2803 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2804 if (ipa_get_jf_ancestor_offset (jfunc
))
2805 ici
->outer_type
= NULL
;
2806 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2810 /* Either we can find a destination for this edge now or never. */
2811 ici
->param_index
= -1;
2817 /* Recursively traverse subtree of NODE (including node) made of inlined
2818 cgraph_edges when CS has been inlined and invoke
2819 update_indirect_edges_after_inlining on all nodes and
2820 update_jump_functions_after_inlining on all non-inlined edges that lead out
2821 of this subtree. Newly discovered indirect edges will be added to
2822 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2826 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2827 struct cgraph_node
*node
,
2828 vec
<cgraph_edge_p
> *new_edges
)
2830 struct cgraph_edge
*e
;
2833 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2835 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2836 if (!e
->inline_failed
)
2837 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2839 update_jump_functions_after_inlining (cs
, e
);
2840 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2841 update_jump_functions_after_inlining (cs
, e
);
2846 /* Combine two controlled uses counts as done during inlining. */
2849 combine_controlled_uses_counters (int c
, int d
)
2851 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2852 return IPA_UNDESCRIBED_USE
;
2857 /* Propagate number of controlled users from CS->caleee to the new root of the
2858 tree of inlined nodes. */
2861 propagate_controlled_uses (struct cgraph_edge
*cs
)
2863 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2864 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2865 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2866 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2867 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2870 count
= MIN (ipa_get_cs_argument_count (args
),
2871 ipa_get_param_count (old_root_info
));
2872 for (i
= 0; i
< count
; i
++)
2874 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2875 struct ipa_cst_ref_desc
*rdesc
;
2877 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2880 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2881 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2882 d
= ipa_get_controlled_uses (old_root_info
, i
);
2884 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2885 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2886 c
= combine_controlled_uses_counters (c
, d
);
2887 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2888 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2890 struct cgraph_node
*n
;
2891 struct ipa_ref
*ref
;
2892 tree t
= new_root_info
->known_vals
[src_idx
];
2894 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2895 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2896 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2897 && (ref
= ipa_find_reference (new_root
,
2901 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2902 "reference from %s/%i to %s/%i.\n",
2903 xstrdup (new_root
->name ()),
2905 xstrdup (n
->name ()), n
->order
);
2906 ipa_remove_reference (ref
);
2910 else if (jf
->type
== IPA_JF_CONST
2911 && (rdesc
= jfunc_rdesc_usable (jf
)))
2913 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2914 int c
= rdesc
->refcount
;
2915 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2916 if (rdesc
->refcount
== 0)
2918 tree cst
= ipa_get_jf_constant (jf
);
2919 struct cgraph_node
*n
;
2920 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2921 && TREE_CODE (TREE_OPERAND (cst
, 0))
2923 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
2926 struct cgraph_node
*clone
;
2928 ok
= remove_described_reference (n
, rdesc
);
2929 gcc_checking_assert (ok
);
2932 while (clone
->global
.inlined_to
2933 && clone
!= rdesc
->cs
->caller
2934 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
2936 struct ipa_ref
*ref
;
2937 ref
= ipa_find_reference (clone
,
2942 fprintf (dump_file
, "ipa-prop: Removing "
2943 "cloning-created reference "
2944 "from %s/%i to %s/%i.\n",
2945 xstrdup (clone
->name ()),
2947 xstrdup (n
->name ()),
2949 ipa_remove_reference (ref
);
2951 clone
= clone
->callers
->caller
;
2958 for (i
= ipa_get_param_count (old_root_info
);
2959 i
< ipa_get_cs_argument_count (args
);
2962 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2964 if (jf
->type
== IPA_JF_CONST
)
2966 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
2968 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
2970 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
2971 ipa_set_controlled_uses (new_root_info
,
2972 jf
->value
.pass_through
.formal_id
,
2973 IPA_UNDESCRIBED_USE
);
2977 /* Update jump functions and call note functions on inlining the call site CS.
2978 CS is expected to lead to a node already cloned by
2979 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2980 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2984 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
2985 vec
<cgraph_edge_p
> *new_edges
)
2988 /* Do nothing if the preparation phase has not been carried out yet
2989 (i.e. during early inlining). */
2990 if (!ipa_node_params_vector
.exists ())
2992 gcc_assert (ipa_edge_args_vector
);
2994 propagate_controlled_uses (cs
);
2995 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3000 /* Frees all dynamically allocated structures that the argument info points
3004 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
3006 vec_free (args
->jump_functions
);
3007 memset (args
, 0, sizeof (*args
));
3010 /* Free all ipa_edge structures. */
3013 ipa_free_all_edge_args (void)
3016 struct ipa_edge_args
*args
;
3018 if (!ipa_edge_args_vector
)
3021 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3022 ipa_free_edge_args_substructures (args
);
3024 vec_free (ipa_edge_args_vector
);
3027 /* Frees all dynamically allocated structures that the param info points
3031 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
3033 info
->descriptors
.release ();
3034 free (info
->lattices
);
3035 /* Lattice values and their sources are deallocated with their alocation
3037 info
->known_vals
.release ();
3038 memset (info
, 0, sizeof (*info
));
3041 /* Free all ipa_node_params structures. */
3044 ipa_free_all_node_params (void)
3047 struct ipa_node_params
*info
;
3049 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
3050 ipa_free_node_params_substructures (info
);
3052 ipa_node_params_vector
.release ();
3055 /* Set the aggregate replacements of NODE to be AGGVALS. */
3058 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3059 struct ipa_agg_replacement_value
*aggvals
)
3061 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
3062 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
3064 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
3067 /* Hook that is called by cgraph.c when an edge is removed. */
3070 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3072 struct ipa_edge_args
*args
;
3074 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3075 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3078 args
= IPA_EDGE_REF (cs
);
3079 if (args
->jump_functions
)
3081 struct ipa_jump_func
*jf
;
3083 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3085 struct ipa_cst_ref_desc
*rdesc
;
3086 try_decrement_rdesc_refcount (jf
);
3087 if (jf
->type
== IPA_JF_CONST
3088 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3094 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3097 /* Hook that is called by cgraph.c when a node is removed. */
3100 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3102 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3103 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
3104 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
3105 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
3106 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
3109 /* Hook that is called by cgraph.c when an edge is duplicated. */
3112 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3113 __attribute__((unused
)) void *data
)
3115 struct ipa_edge_args
*old_args
, *new_args
;
3118 ipa_check_create_edge_args ();
3120 old_args
= IPA_EDGE_REF (src
);
3121 new_args
= IPA_EDGE_REF (dst
);
3123 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3125 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3127 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3128 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3130 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3132 if (src_jf
->type
== IPA_JF_CONST
)
3134 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3137 dst_jf
->value
.constant
.rdesc
= NULL
;
3138 else if (src
->caller
== dst
->caller
)
3140 struct ipa_ref
*ref
;
3141 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3142 gcc_checking_assert (n
);
3143 ref
= ipa_find_reference (src
->caller
, n
,
3144 src
->call_stmt
, src
->lto_stmt_uid
);
3145 gcc_checking_assert (ref
);
3146 ipa_clone_ref (ref
, dst
->caller
, ref
->stmt
);
3148 gcc_checking_assert (ipa_refdesc_pool
);
3149 struct ipa_cst_ref_desc
*dst_rdesc
3150 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3151 dst_rdesc
->cs
= dst
;
3152 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3153 dst_rdesc
->next_duplicate
= NULL
;
3154 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3156 else if (src_rdesc
->cs
== src
)
3158 struct ipa_cst_ref_desc
*dst_rdesc
;
3159 gcc_checking_assert (ipa_refdesc_pool
);
3161 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3162 dst_rdesc
->cs
= dst
;
3163 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3164 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3165 src_rdesc
->next_duplicate
= dst_rdesc
;
3166 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3170 struct ipa_cst_ref_desc
*dst_rdesc
;
3171 /* This can happen during inlining, when a JFUNC can refer to a
3172 reference taken in a function up in the tree of inline clones.
3173 We need to find the duplicate that refers to our tree of
3176 gcc_assert (dst
->caller
->global
.inlined_to
);
3177 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3179 dst_rdesc
= dst_rdesc
->next_duplicate
)
3181 struct cgraph_node
*top
;
3182 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3183 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3184 : dst_rdesc
->cs
->caller
;
3185 if (dst
->caller
->global
.inlined_to
== top
)
3188 gcc_assert (dst_rdesc
);
3189 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3195 /* Hook that is called by cgraph.c when a node is duplicated. */
3198 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3199 ATTRIBUTE_UNUSED
void *data
)
3201 struct ipa_node_params
*old_info
, *new_info
;
3202 struct ipa_agg_replacement_value
*old_av
, *new_av
;
3204 ipa_check_create_node_params ();
3205 old_info
= IPA_NODE_REF (src
);
3206 new_info
= IPA_NODE_REF (dst
);
3208 new_info
->descriptors
= old_info
->descriptors
.copy ();
3209 new_info
->lattices
= NULL
;
3210 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3212 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
3213 new_info
->node_enqueued
= old_info
->node_enqueued
;
3215 old_av
= ipa_get_agg_replacements_for_node (src
);
3222 struct ipa_agg_replacement_value
*v
;
3224 v
= ggc_alloc_ipa_agg_replacement_value ();
3225 memcpy (v
, old_av
, sizeof (*v
));
3228 old_av
= old_av
->next
;
3230 ipa_set_node_agg_value_chain (dst
, new_av
);
3234 /* Analyze newly added function into callgraph. */
3237 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3239 if (cgraph_function_with_gimple_body_p (node
))
3240 ipa_analyze_node (node
);
3243 /* Register our cgraph hooks if they are not already there. */
3246 ipa_register_cgraph_hooks (void)
3248 if (!edge_removal_hook_holder
)
3249 edge_removal_hook_holder
=
3250 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3251 if (!node_removal_hook_holder
)
3252 node_removal_hook_holder
=
3253 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
3254 if (!edge_duplication_hook_holder
)
3255 edge_duplication_hook_holder
=
3256 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3257 if (!node_duplication_hook_holder
)
3258 node_duplication_hook_holder
=
3259 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3260 function_insertion_hook_holder
=
3261 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
3264 /* Unregister our cgraph hooks if they are not already there. */
3267 ipa_unregister_cgraph_hooks (void)
3269 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
3270 edge_removal_hook_holder
= NULL
;
3271 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
3272 node_removal_hook_holder
= NULL
;
3273 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3274 edge_duplication_hook_holder
= NULL
;
3275 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
3276 node_duplication_hook_holder
= NULL
;
3277 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
3278 function_insertion_hook_holder
= NULL
;
3281 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3282 longer needed after ipa-cp. */
3285 ipa_free_all_structures_after_ipa_cp (void)
3289 ipa_free_all_edge_args ();
3290 ipa_free_all_node_params ();
3291 free_alloc_pool (ipcp_sources_pool
);
3292 free_alloc_pool (ipcp_values_pool
);
3293 free_alloc_pool (ipcp_agg_lattice_pool
);
3294 ipa_unregister_cgraph_hooks ();
3295 if (ipa_refdesc_pool
)
3296 free_alloc_pool (ipa_refdesc_pool
);
3300 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3301 longer needed after indirect inlining. */
3304 ipa_free_all_structures_after_iinln (void)
3306 ipa_free_all_edge_args ();
3307 ipa_free_all_node_params ();
3308 ipa_unregister_cgraph_hooks ();
3309 if (ipcp_sources_pool
)
3310 free_alloc_pool (ipcp_sources_pool
);
3311 if (ipcp_values_pool
)
3312 free_alloc_pool (ipcp_values_pool
);
3313 if (ipcp_agg_lattice_pool
)
3314 free_alloc_pool (ipcp_agg_lattice_pool
);
3315 if (ipa_refdesc_pool
)
3316 free_alloc_pool (ipa_refdesc_pool
);
3319 /* Print ipa_tree_map data structures of all functions in the
3323 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3326 struct ipa_node_params
*info
;
3328 if (!node
->definition
)
3330 info
= IPA_NODE_REF (node
);
3331 fprintf (f
, " function %s/%i parameter descriptors:\n",
3332 node
->name (), node
->order
);
3333 count
= ipa_get_param_count (info
);
3334 for (i
= 0; i
< count
; i
++)
3339 ipa_dump_param (f
, info
, i
);
3340 if (ipa_is_param_used (info
, i
))
3341 fprintf (f
, " used");
3342 c
= ipa_get_controlled_uses (info
, i
);
3343 if (c
== IPA_UNDESCRIBED_USE
)
3344 fprintf (f
, " undescribed_use");
3346 fprintf (f
, " controlled_uses=%i", c
);
3351 /* Print ipa_tree_map data structures of all functions in the
3355 ipa_print_all_params (FILE * f
)
3357 struct cgraph_node
*node
;
3359 fprintf (f
, "\nFunction parameters:\n");
3360 FOR_EACH_FUNCTION (node
)
3361 ipa_print_node_params (f
, node
);
3364 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3367 ipa_get_vector_of_formal_parms (tree fndecl
)
3373 gcc_assert (!flag_wpa
);
3374 count
= count_formal_params (fndecl
);
3375 args
.create (count
);
3376 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3377 args
.quick_push (parm
);
3382 /* Return a heap allocated vector containing types of formal parameters of
3383 function type FNTYPE. */
3386 ipa_get_vector_of_formal_parm_types (tree fntype
)
3392 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3395 types
.create (count
);
3396 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3397 types
.quick_push (TREE_VALUE (t
));
3402 /* Modify the function declaration FNDECL and its type according to the plan in
3403 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3404 to reflect the actual parameters being modified which are determined by the
3405 base_index field. */
3408 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3410 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3411 tree orig_type
= TREE_TYPE (fndecl
);
3412 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3414 /* The following test is an ugly hack, some functions simply don't have any
3415 arguments in their type. This is probably a bug but well... */
3416 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3417 bool last_parm_void
;
3421 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3423 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3425 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3427 gcc_assert (oparms
.length () == otypes
.length ());
3431 last_parm_void
= false;
3435 int len
= adjustments
.length ();
3436 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3437 tree new_arg_types
= NULL
;
3438 for (int i
= 0; i
< len
; i
++)
3440 struct ipa_parm_adjustment
*adj
;
3443 adj
= &adjustments
[i
];
3445 if (adj
->op
== IPA_PARM_OP_NEW
)
3448 parm
= oparms
[adj
->base_index
];
3451 if (adj
->op
== IPA_PARM_OP_COPY
)
3454 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3457 link
= &DECL_CHAIN (parm
);
3459 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3465 ptype
= build_pointer_type (adj
->type
);
3469 if (is_gimple_reg_type (ptype
))
3471 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3472 if (TYPE_ALIGN (ptype
) < malign
)
3473 ptype
= build_aligned_type (ptype
, malign
);
3478 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3480 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3482 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3483 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3484 DECL_ARTIFICIAL (new_parm
) = 1;
3485 DECL_ARG_TYPE (new_parm
) = ptype
;
3486 DECL_CONTEXT (new_parm
) = fndecl
;
3487 TREE_USED (new_parm
) = 1;
3488 DECL_IGNORED_P (new_parm
) = 1;
3489 layout_decl (new_parm
, 0);
3491 if (adj
->op
== IPA_PARM_OP_NEW
)
3495 adj
->new_decl
= new_parm
;
3498 link
= &DECL_CHAIN (new_parm
);
3504 tree new_reversed
= NULL
;
3507 new_reversed
= nreverse (new_arg_types
);
3511 TREE_CHAIN (new_arg_types
) = void_list_node
;
3513 new_reversed
= void_list_node
;
3517 /* Use copy_node to preserve as much as possible from original type
3518 (debug info, attribute lists etc.)
3519 Exception is METHOD_TYPEs must have THIS argument.
3520 When we are asked to remove it, we need to build new FUNCTION_TYPE
3522 tree new_type
= NULL
;
3523 if (TREE_CODE (orig_type
) != METHOD_TYPE
3524 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3525 && adjustments
[0].base_index
== 0))
3527 new_type
= build_distinct_type_copy (orig_type
);
3528 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3533 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3535 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3536 DECL_VINDEX (fndecl
) = NULL_TREE
;
3539 /* When signature changes, we need to clear builtin info. */
3540 if (DECL_BUILT_IN (fndecl
))
3542 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3543 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3546 /* This is a new type, not a copy of an old type. Need to reassociate
3547 variants. We can handle everything except the main variant lazily. */
3548 tree t
= TYPE_MAIN_VARIANT (orig_type
);
3551 TYPE_MAIN_VARIANT (new_type
) = t
;
3552 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3553 TYPE_NEXT_VARIANT (t
) = new_type
;
3557 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3558 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3561 TREE_TYPE (fndecl
) = new_type
;
3562 DECL_VIRTUAL_P (fndecl
) = 0;
3567 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3568 If this is a directly recursive call, CS must be NULL. Otherwise it must
3569 contain the corresponding call graph edge. */
3572 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3573 ipa_parm_adjustment_vec adjustments
)
3575 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3577 vec
<tree
, va_gc
> **debug_args
= NULL
;
3579 gimple_stmt_iterator gsi
, prev_gsi
;
3583 len
= adjustments
.length ();
3585 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3586 ipa_remove_stmt_references (current_node
, stmt
);
3588 gsi
= gsi_for_stmt (stmt
);
3590 gsi_prev (&prev_gsi
);
3591 for (i
= 0; i
< len
; i
++)
3593 struct ipa_parm_adjustment
*adj
;
3595 adj
= &adjustments
[i
];
3597 if (adj
->op
== IPA_PARM_OP_COPY
)
3599 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3601 vargs
.quick_push (arg
);
3603 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3605 tree expr
, base
, off
;
3607 unsigned int deref_align
= 0;
3608 bool deref_base
= false;
3610 /* We create a new parameter out of the value of the old one, we can
3611 do the following kind of transformations:
3613 - A scalar passed by reference is converted to a scalar passed by
3614 value. (adj->by_ref is false and the type of the original
3615 actual argument is a pointer to a scalar).
3617 - A part of an aggregate is passed instead of the whole aggregate.
3618 The part can be passed either by value or by reference, this is
3619 determined by value of adj->by_ref. Moreover, the code below
3620 handles both situations when the original aggregate is passed by
3621 value (its type is not a pointer) and when it is passed by
3622 reference (it is a pointer to an aggregate).
3624 When the new argument is passed by reference (adj->by_ref is true)
3625 it must be a part of an aggregate and therefore we form it by
3626 simply taking the address of a reference inside the original
3629 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3630 base
= gimple_call_arg (stmt
, adj
->base_index
);
3631 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3632 : EXPR_LOCATION (base
);
3634 if (TREE_CODE (base
) != ADDR_EXPR
3635 && POINTER_TYPE_P (TREE_TYPE (base
)))
3636 off
= build_int_cst (adj
->alias_ptr_type
,
3637 adj
->offset
/ BITS_PER_UNIT
);
3640 HOST_WIDE_INT base_offset
;
3644 if (TREE_CODE (base
) == ADDR_EXPR
)
3646 base
= TREE_OPERAND (base
, 0);
3652 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3653 /* Aggregate arguments can have non-invariant addresses. */
3656 base
= build_fold_addr_expr (prev_base
);
3657 off
= build_int_cst (adj
->alias_ptr_type
,
3658 adj
->offset
/ BITS_PER_UNIT
);
3660 else if (TREE_CODE (base
) == MEM_REF
)
3665 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3667 off
= build_int_cst (adj
->alias_ptr_type
,
3669 + adj
->offset
/ BITS_PER_UNIT
);
3670 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3672 base
= TREE_OPERAND (base
, 0);
3676 off
= build_int_cst (adj
->alias_ptr_type
,
3678 + adj
->offset
/ BITS_PER_UNIT
);
3679 base
= build_fold_addr_expr (base
);
3685 tree type
= adj
->type
;
3687 unsigned HOST_WIDE_INT misalign
;
3691 align
= deref_align
;
3696 get_pointer_alignment_1 (base
, &align
, &misalign
);
3697 if (TYPE_ALIGN (type
) > align
)
3698 align
= TYPE_ALIGN (type
);
3700 misalign
+= (tree_to_double_int (off
)
3701 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3703 misalign
= misalign
& (align
- 1);
3705 align
= (misalign
& -misalign
);
3706 if (align
< TYPE_ALIGN (type
))
3707 type
= build_aligned_type (type
, align
);
3708 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3712 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3713 expr
= build_fold_addr_expr (expr
);
3716 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3718 || is_gimple_reg_type (adj
->type
),
3719 NULL
, true, GSI_SAME_STMT
);
3720 vargs
.quick_push (expr
);
3722 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
3725 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3728 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3729 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3731 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3733 arg
= fold_convert_loc (gimple_location (stmt
),
3734 TREE_TYPE (origin
), arg
);
3736 if (debug_args
== NULL
)
3737 debug_args
= decl_debug_args_insert (callee_decl
);
3738 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3739 if (ddecl
== origin
)
3741 ddecl
= (**debug_args
)[ix
+ 1];
3746 ddecl
= make_node (DEBUG_EXPR_DECL
);
3747 DECL_ARTIFICIAL (ddecl
) = 1;
3748 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3749 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3751 vec_safe_push (*debug_args
, origin
);
3752 vec_safe_push (*debug_args
, ddecl
);
3754 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3755 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3759 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3761 fprintf (dump_file
, "replacing stmt:");
3762 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3765 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3767 if (gimple_call_lhs (stmt
))
3768 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3770 gimple_set_block (new_stmt
, gimple_block (stmt
));
3771 if (gimple_has_location (stmt
))
3772 gimple_set_location (new_stmt
, gimple_location (stmt
));
3773 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3774 gimple_call_copy_flags (new_stmt
, stmt
);
3776 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3778 fprintf (dump_file
, "with stmt:");
3779 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3780 fprintf (dump_file
, "\n");
3782 gsi_replace (&gsi
, new_stmt
, true);
3784 cgraph_set_call_stmt (cs
, new_stmt
);
3787 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3790 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3791 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3793 update_ssa (TODO_update_ssa
);
3794 free_dominance_info (CDI_DOMINATORS
);
3797 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3798 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3799 specifies whether the function should care about type incompatibility the
3800 current and new expressions. If it is false, the function will leave
3801 incompatibility issues to the caller. Return true iff the expression
3805 ipa_modify_expr (tree
*expr
, bool convert
,
3806 ipa_parm_adjustment_vec adjustments
)
3808 struct ipa_parm_adjustment
*cand
3809 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
3815 src
= build_simple_mem_ref (cand
->new_decl
);
3817 src
= cand
->new_decl
;
3819 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3821 fprintf (dump_file
, "About to replace expr ");
3822 print_generic_expr (dump_file
, *expr
, 0);
3823 fprintf (dump_file
, " with ");
3824 print_generic_expr (dump_file
, src
, 0);
3825 fprintf (dump_file
, "\n");
3828 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
3830 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
3838 /* If T is an SSA_NAME, return NULL if it is not a default def or
3839 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3840 the base variable is always returned, regardless if it is a default
3841 def. Return T if it is not an SSA_NAME. */
3844 get_ssa_base_param (tree t
, bool ignore_default_def
)
3846 if (TREE_CODE (t
) == SSA_NAME
)
3848 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
3849 return SSA_NAME_VAR (t
);
3856 /* Given an expression, return an adjustment entry specifying the
3857 transformation to be done on EXPR. If no suitable adjustment entry
3858 was found, returns NULL.
3860 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3861 default def, otherwise bail on them.
3863 If CONVERT is non-NULL, this function will set *CONVERT if the
3864 expression provided is a component reference. ADJUSTMENTS is the
3865 adjustments vector. */
3867 ipa_parm_adjustment
*
3868 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
3869 ipa_parm_adjustment_vec adjustments
,
3870 bool ignore_default_def
)
3872 if (TREE_CODE (**expr
) == BIT_FIELD_REF
3873 || TREE_CODE (**expr
) == IMAGPART_EXPR
3874 || TREE_CODE (**expr
) == REALPART_EXPR
)
3876 *expr
= &TREE_OPERAND (**expr
, 0);
3881 HOST_WIDE_INT offset
, size
, max_size
;
3882 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
3883 if (!base
|| size
== -1 || max_size
== -1)
3886 if (TREE_CODE (base
) == MEM_REF
)
3888 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
3889 base
= TREE_OPERAND (base
, 0);
3892 base
= get_ssa_base_param (base
, ignore_default_def
);
3893 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
3896 struct ipa_parm_adjustment
*cand
= NULL
;
3897 unsigned int len
= adjustments
.length ();
3898 for (unsigned i
= 0; i
< len
; i
++)
3900 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
3902 if (adj
->base
== base
3903 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
3910 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
3915 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3918 index_in_adjustments_multiple_times_p (int base_index
,
3919 ipa_parm_adjustment_vec adjustments
)
3921 int i
, len
= adjustments
.length ();
3924 for (i
= 0; i
< len
; i
++)
3926 struct ipa_parm_adjustment
*adj
;
3927 adj
= &adjustments
[i
];
3929 if (adj
->base_index
== base_index
)
3941 /* Return adjustments that should have the same effect on function parameters
3942 and call arguments as if they were first changed according to adjustments in
3943 INNER and then by adjustments in OUTER. */
3945 ipa_parm_adjustment_vec
3946 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
3947 ipa_parm_adjustment_vec outer
)
3949 int i
, outlen
= outer
.length ();
3950 int inlen
= inner
.length ();
3952 ipa_parm_adjustment_vec adjustments
, tmp
;
3955 for (i
= 0; i
< inlen
; i
++)
3957 struct ipa_parm_adjustment
*n
;
3960 if (n
->op
== IPA_PARM_OP_REMOVE
)
3964 /* FIXME: Handling of new arguments are not implemented yet. */
3965 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
3966 tmp
.quick_push (*n
);
3970 adjustments
.create (outlen
+ removals
);
3971 for (i
= 0; i
< outlen
; i
++)
3973 struct ipa_parm_adjustment r
;
3974 struct ipa_parm_adjustment
*out
= &outer
[i
];
3975 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
3977 memset (&r
, 0, sizeof (r
));
3978 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
3979 if (out
->op
== IPA_PARM_OP_REMOVE
)
3981 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
3983 r
.op
= IPA_PARM_OP_REMOVE
;
3984 adjustments
.quick_push (r
);
3990 /* FIXME: Handling of new arguments are not implemented yet. */
3991 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
3994 r
.base_index
= in
->base_index
;
3997 /* FIXME: Create nonlocal value too. */
3999 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
4000 r
.op
= IPA_PARM_OP_COPY
;
4001 else if (in
->op
== IPA_PARM_OP_COPY
)
4002 r
.offset
= out
->offset
;
4003 else if (out
->op
== IPA_PARM_OP_COPY
)
4004 r
.offset
= in
->offset
;
4006 r
.offset
= in
->offset
+ out
->offset
;
4007 adjustments
.quick_push (r
);
4010 for (i
= 0; i
< inlen
; i
++)
4012 struct ipa_parm_adjustment
*n
= &inner
[i
];
4014 if (n
->op
== IPA_PARM_OP_REMOVE
)
4015 adjustments
.quick_push (*n
);
4022 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4023 friendly way, assuming they are meant to be applied to FNDECL. */
4026 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4029 int i
, len
= adjustments
.length ();
4031 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4033 fprintf (file
, "IPA param adjustments: ");
4034 for (i
= 0; i
< len
; i
++)
4036 struct ipa_parm_adjustment
*adj
;
4037 adj
= &adjustments
[i
];
4040 fprintf (file
, " ");
4044 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4045 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4048 fprintf (file
, ", base: ");
4049 print_generic_expr (file
, adj
->base
, 0);
4053 fprintf (file
, ", new_decl: ");
4054 print_generic_expr (file
, adj
->new_decl
, 0);
4056 if (adj
->new_ssa_base
)
4058 fprintf (file
, ", new_ssa_base: ");
4059 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4062 if (adj
->op
== IPA_PARM_OP_COPY
)
4063 fprintf (file
, ", copy_param");
4064 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4065 fprintf (file
, ", remove_param");
4067 fprintf (file
, ", offset %li", (long) adj
->offset
);
4069 fprintf (file
, ", by_ref");
4070 print_node_brief (file
, ", type: ", adj
->type
, 0);
4071 fprintf (file
, "\n");
4076 /* Dump the AV linked list. */
4079 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4082 fprintf (f
, " Aggregate replacements:");
4083 for (; av
; av
= av
->next
)
4085 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4086 av
->index
, av
->offset
);
4087 print_generic_expr (f
, av
->value
, 0);
4093 /* Stream out jump function JUMP_FUNC to OB. */
4096 ipa_write_jump_function (struct output_block
*ob
,
4097 struct ipa_jump_func
*jump_func
)
4099 struct ipa_agg_jf_item
*item
;
4100 struct bitpack_d bp
;
4103 streamer_write_uhwi (ob
, jump_func
->type
);
4104 switch (jump_func
->type
)
4106 case IPA_JF_UNKNOWN
:
4108 case IPA_JF_KNOWN_TYPE
:
4109 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
4110 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
4111 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
4115 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4116 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4118 case IPA_JF_PASS_THROUGH
:
4119 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4120 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4122 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4123 bp
= bitpack_create (ob
->main_stream
);
4124 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4125 bp_pack_value (&bp
, jump_func
->value
.pass_through
.type_preserved
, 1);
4126 streamer_write_bitpack (&bp
);
4130 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4131 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4134 case IPA_JF_ANCESTOR
:
4135 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4136 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
4137 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4138 bp
= bitpack_create (ob
->main_stream
);
4139 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4140 bp_pack_value (&bp
, jump_func
->value
.ancestor
.type_preserved
, 1);
4141 streamer_write_bitpack (&bp
);
4145 count
= vec_safe_length (jump_func
->agg
.items
);
4146 streamer_write_uhwi (ob
, count
);
4149 bp
= bitpack_create (ob
->main_stream
);
4150 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4151 streamer_write_bitpack (&bp
);
4154 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4156 streamer_write_uhwi (ob
, item
->offset
);
4157 stream_write_tree (ob
, item
->value
, true);
4161 /* Read in jump function JUMP_FUNC from IB. */
4164 ipa_read_jump_function (struct lto_input_block
*ib
,
4165 struct ipa_jump_func
*jump_func
,
4166 struct cgraph_edge
*cs
,
4167 struct data_in
*data_in
)
4169 enum jump_func_type jftype
;
4170 enum tree_code operation
;
4173 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4176 case IPA_JF_UNKNOWN
:
4177 jump_func
->type
= IPA_JF_UNKNOWN
;
4179 case IPA_JF_KNOWN_TYPE
:
4181 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4182 tree base_type
= stream_read_tree (ib
, data_in
);
4183 tree component_type
= stream_read_tree (ib
, data_in
);
4185 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
4189 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4191 case IPA_JF_PASS_THROUGH
:
4192 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4193 if (operation
== NOP_EXPR
)
4195 int formal_id
= streamer_read_uhwi (ib
);
4196 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4197 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4198 bool type_preserved
= bp_unpack_value (&bp
, 1);
4199 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
,
4204 tree operand
= stream_read_tree (ib
, data_in
);
4205 int formal_id
= streamer_read_uhwi (ib
);
4206 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4210 case IPA_JF_ANCESTOR
:
4212 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4213 tree type
= stream_read_tree (ib
, data_in
);
4214 int formal_id
= streamer_read_uhwi (ib
);
4215 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4216 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4217 bool type_preserved
= bp_unpack_value (&bp
, 1);
4219 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
,
4225 count
= streamer_read_uhwi (ib
);
4226 vec_alloc (jump_func
->agg
.items
, count
);
4229 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4230 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4232 for (i
= 0; i
< count
; i
++)
4234 struct ipa_agg_jf_item item
;
4235 item
.offset
= streamer_read_uhwi (ib
);
4236 item
.value
= stream_read_tree (ib
, data_in
);
4237 jump_func
->agg
.items
->quick_push (item
);
4241 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4242 relevant to indirect inlining to OB. */
4245 ipa_write_indirect_edge_info (struct output_block
*ob
,
4246 struct cgraph_edge
*cs
)
4248 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4249 struct bitpack_d bp
;
4251 streamer_write_hwi (ob
, ii
->param_index
);
4252 streamer_write_hwi (ob
, ii
->offset
);
4253 bp
= bitpack_create (ob
->main_stream
);
4254 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4255 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4256 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4257 bp_pack_value (&bp
, ii
->by_ref
, 1);
4258 bp_pack_value (&bp
, ii
->maybe_in_construction
, 1);
4259 bp_pack_value (&bp
, ii
->maybe_derived_type
, 1);
4260 streamer_write_bitpack (&bp
);
4262 if (ii
->polymorphic
)
4264 streamer_write_hwi (ob
, ii
->otr_token
);
4265 stream_write_tree (ob
, ii
->otr_type
, true);
4266 stream_write_tree (ob
, ii
->outer_type
, true);
4270 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4271 relevant to indirect inlining from IB. */
4274 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4275 struct data_in
*data_in ATTRIBUTE_UNUSED
,
4276 struct cgraph_edge
*cs
)
4278 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4279 struct bitpack_d bp
;
4281 ii
->param_index
= (int) streamer_read_hwi (ib
);
4282 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4283 bp
= streamer_read_bitpack (ib
);
4284 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4285 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4286 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4287 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4288 ii
->maybe_in_construction
= bp_unpack_value (&bp
, 1);
4289 ii
->maybe_derived_type
= bp_unpack_value (&bp
, 1);
4290 if (ii
->polymorphic
)
4292 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4293 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4294 ii
->outer_type
= stream_read_tree (ib
, data_in
);
4298 /* Stream out NODE info to OB. */
4301 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4304 lto_symtab_encoder_t encoder
;
4305 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4307 struct cgraph_edge
*e
;
4308 struct bitpack_d bp
;
4310 encoder
= ob
->decl_state
->symtab_node_encoder
;
4311 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4312 streamer_write_uhwi (ob
, node_ref
);
4314 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4315 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4316 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4317 bp
= bitpack_create (ob
->main_stream
);
4318 gcc_assert (info
->uses_analysis_done
4319 || ipa_get_param_count (info
) == 0);
4320 gcc_assert (!info
->node_enqueued
);
4321 gcc_assert (!info
->ipcp_orig_node
);
4322 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4323 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4324 streamer_write_bitpack (&bp
);
4325 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4326 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4327 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4329 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4331 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4332 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4333 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4335 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4337 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4339 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4340 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4341 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4342 ipa_write_indirect_edge_info (ob
, e
);
4346 /* Stream in NODE info from IB. */
4349 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4350 struct data_in
*data_in
)
4352 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4354 struct cgraph_edge
*e
;
4355 struct bitpack_d bp
;
4357 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4359 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4360 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
4362 bp
= streamer_read_bitpack (ib
);
4363 if (ipa_get_param_count (info
) != 0)
4364 info
->uses_analysis_done
= true;
4365 info
->node_enqueued
= false;
4366 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4367 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4368 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4369 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
4370 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4372 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4373 int count
= streamer_read_uhwi (ib
);
4377 vec_safe_grow_cleared (args
->jump_functions
, count
);
4379 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4380 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4383 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4385 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4386 int count
= streamer_read_uhwi (ib
);
4390 vec_safe_grow_cleared (args
->jump_functions
, count
);
4391 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4392 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4395 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4399 /* Write jump functions for nodes in SET. */
4402 ipa_prop_write_jump_functions (void)
4404 struct cgraph_node
*node
;
4405 struct output_block
*ob
;
4406 unsigned int count
= 0;
4407 lto_symtab_encoder_iterator lsei
;
4408 lto_symtab_encoder_t encoder
;
4411 if (!ipa_node_params_vector
.exists ())
4414 ob
= create_output_block (LTO_section_jump_functions
);
4415 encoder
= ob
->decl_state
->symtab_node_encoder
;
4416 ob
->cgraph_node
= NULL
;
4417 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4418 lsei_next_function_in_partition (&lsei
))
4420 node
= lsei_cgraph_node (lsei
);
4421 if (cgraph_function_with_gimple_body_p (node
)
4422 && IPA_NODE_REF (node
) != NULL
)
4426 streamer_write_uhwi (ob
, count
);
4428 /* Process all of the functions. */
4429 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4430 lsei_next_function_in_partition (&lsei
))
4432 node
= lsei_cgraph_node (lsei
);
4433 if (cgraph_function_with_gimple_body_p (node
)
4434 && IPA_NODE_REF (node
) != NULL
)
4435 ipa_write_node_info (ob
, node
);
4437 streamer_write_char_stream (ob
->main_stream
, 0);
4438 produce_asm (ob
, NULL
);
4439 destroy_output_block (ob
);
4442 /* Read section in file FILE_DATA of length LEN with data DATA. */
4445 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4448 const struct lto_function_header
*header
=
4449 (const struct lto_function_header
*) data
;
4450 const int cfg_offset
= sizeof (struct lto_function_header
);
4451 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4452 const int string_offset
= main_offset
+ header
->main_size
;
4453 struct data_in
*data_in
;
4454 struct lto_input_block ib_main
;
4458 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4462 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4463 header
->string_size
, vNULL
);
4464 count
= streamer_read_uhwi (&ib_main
);
4466 for (i
= 0; i
< count
; i
++)
4469 struct cgraph_node
*node
;
4470 lto_symtab_encoder_t encoder
;
4472 index
= streamer_read_uhwi (&ib_main
);
4473 encoder
= file_data
->symtab_node_encoder
;
4474 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4475 gcc_assert (node
->definition
);
4476 ipa_read_node_info (&ib_main
, node
, data_in
);
4478 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4480 lto_data_in_delete (data_in
);
4483 /* Read ipcp jump functions. */
4486 ipa_prop_read_jump_functions (void)
4488 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4489 struct lto_file_decl_data
*file_data
;
4492 ipa_check_create_node_params ();
4493 ipa_check_create_edge_args ();
4494 ipa_register_cgraph_hooks ();
4496 while ((file_data
= file_data_vec
[j
++]))
4499 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4502 ipa_prop_read_section (file_data
, data
, len
);
4506 /* After merging units, we can get mismatch in argument counts.
4507 Also decl merging might've rendered parameter lists obsolete.
4508 Also compute called_with_variable_arg info. */
4511 ipa_update_after_lto_read (void)
4513 ipa_check_create_node_params ();
4514 ipa_check_create_edge_args ();
4518 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4521 unsigned int count
= 0;
4522 lto_symtab_encoder_t encoder
;
4523 struct ipa_agg_replacement_value
*aggvals
, *av
;
4525 aggvals
= ipa_get_agg_replacements_for_node (node
);
4526 encoder
= ob
->decl_state
->symtab_node_encoder
;
4527 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4528 streamer_write_uhwi (ob
, node_ref
);
4530 for (av
= aggvals
; av
; av
= av
->next
)
4532 streamer_write_uhwi (ob
, count
);
4534 for (av
= aggvals
; av
; av
= av
->next
)
4536 struct bitpack_d bp
;
4538 streamer_write_uhwi (ob
, av
->offset
);
4539 streamer_write_uhwi (ob
, av
->index
);
4540 stream_write_tree (ob
, av
->value
, true);
4542 bp
= bitpack_create (ob
->main_stream
);
4543 bp_pack_value (&bp
, av
->by_ref
, 1);
4544 streamer_write_bitpack (&bp
);
4548 /* Stream in the aggregate value replacement chain for NODE from IB. */
4551 read_agg_replacement_chain (struct lto_input_block
*ib
,
4552 struct cgraph_node
*node
,
4553 struct data_in
*data_in
)
4555 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4556 unsigned int count
, i
;
4558 count
= streamer_read_uhwi (ib
);
4559 for (i
= 0; i
<count
; i
++)
4561 struct ipa_agg_replacement_value
*av
;
4562 struct bitpack_d bp
;
4564 av
= ggc_alloc_ipa_agg_replacement_value ();
4565 av
->offset
= streamer_read_uhwi (ib
);
4566 av
->index
= streamer_read_uhwi (ib
);
4567 av
->value
= stream_read_tree (ib
, data_in
);
4568 bp
= streamer_read_bitpack (ib
);
4569 av
->by_ref
= bp_unpack_value (&bp
, 1);
4573 ipa_set_node_agg_value_chain (node
, aggvals
);
4576 /* Write all aggregate replacement for nodes in set. */
4579 ipa_prop_write_all_agg_replacement (void)
4581 struct cgraph_node
*node
;
4582 struct output_block
*ob
;
4583 unsigned int count
= 0;
4584 lto_symtab_encoder_iterator lsei
;
4585 lto_symtab_encoder_t encoder
;
4587 if (!ipa_node_agg_replacements
)
4590 ob
= create_output_block (LTO_section_ipcp_transform
);
4591 encoder
= ob
->decl_state
->symtab_node_encoder
;
4592 ob
->cgraph_node
= NULL
;
4593 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4594 lsei_next_function_in_partition (&lsei
))
4596 node
= lsei_cgraph_node (lsei
);
4597 if (cgraph_function_with_gimple_body_p (node
)
4598 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4602 streamer_write_uhwi (ob
, count
);
4604 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4605 lsei_next_function_in_partition (&lsei
))
4607 node
= lsei_cgraph_node (lsei
);
4608 if (cgraph_function_with_gimple_body_p (node
)
4609 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4610 write_agg_replacement_chain (ob
, node
);
4612 streamer_write_char_stream (ob
->main_stream
, 0);
4613 produce_asm (ob
, NULL
);
4614 destroy_output_block (ob
);
4617 /* Read replacements section in file FILE_DATA of length LEN with data
4621 read_replacements_section (struct lto_file_decl_data
*file_data
,
4625 const struct lto_function_header
*header
=
4626 (const struct lto_function_header
*) data
;
4627 const int cfg_offset
= sizeof (struct lto_function_header
);
4628 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4629 const int string_offset
= main_offset
+ header
->main_size
;
4630 struct data_in
*data_in
;
4631 struct lto_input_block ib_main
;
4635 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4638 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4639 header
->string_size
, vNULL
);
4640 count
= streamer_read_uhwi (&ib_main
);
4642 for (i
= 0; i
< count
; i
++)
4645 struct cgraph_node
*node
;
4646 lto_symtab_encoder_t encoder
;
4648 index
= streamer_read_uhwi (&ib_main
);
4649 encoder
= file_data
->symtab_node_encoder
;
4650 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4651 gcc_assert (node
->definition
);
4652 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4654 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4656 lto_data_in_delete (data_in
);
4659 /* Read IPA-CP aggregate replacements. */
4662 ipa_prop_read_all_agg_replacement (void)
4664 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4665 struct lto_file_decl_data
*file_data
;
4668 while ((file_data
= file_data_vec
[j
++]))
4671 const char *data
= lto_get_section_data (file_data
,
4672 LTO_section_ipcp_transform
,
4675 read_replacements_section (file_data
, data
, len
);
4679 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4683 adjust_agg_replacement_values (struct cgraph_node
*node
,
4684 struct ipa_agg_replacement_value
*aggval
)
4686 struct ipa_agg_replacement_value
*v
;
4687 int i
, c
= 0, d
= 0, *adj
;
4689 if (!node
->clone
.combined_args_to_skip
)
4692 for (v
= aggval
; v
; v
= v
->next
)
4694 gcc_assert (v
->index
>= 0);
4700 adj
= XALLOCAVEC (int, c
);
4701 for (i
= 0; i
< c
; i
++)
4702 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
4710 for (v
= aggval
; v
; v
= v
->next
)
4711 v
->index
= adj
[v
->index
];
4715 /* Function body transformation phase. */
4718 ipcp_transform_function (struct cgraph_node
*node
)
4720 vec
<ipa_param_descriptor
> descriptors
= vNULL
;
4721 struct param_analysis_info
*parms_ainfo
;
4722 struct ipa_agg_replacement_value
*aggval
;
4723 gimple_stmt_iterator gsi
;
4726 bool cfg_changed
= false, something_changed
= false;
4728 gcc_checking_assert (cfun
);
4729 gcc_checking_assert (current_function_decl
);
4732 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4733 node
->name (), node
->order
);
4735 aggval
= ipa_get_agg_replacements_for_node (node
);
4738 param_count
= count_formal_params (node
->decl
);
4739 if (param_count
== 0)
4741 adjust_agg_replacement_values (node
, aggval
);
4743 ipa_dump_agg_replacement_values (dump_file
, aggval
);
4744 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4745 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4746 descriptors
.safe_grow_cleared (param_count
);
4747 ipa_populate_param_decls (node
, descriptors
);
4749 FOR_EACH_BB_FN (bb
, cfun
)
4750 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4752 struct ipa_agg_replacement_value
*v
;
4753 gimple stmt
= gsi_stmt (gsi
);
4755 HOST_WIDE_INT offset
, size
;
4759 if (!gimple_assign_load_p (stmt
))
4761 rhs
= gimple_assign_rhs1 (stmt
);
4762 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
4767 while (handled_component_p (t
))
4769 /* V_C_E can do things like convert an array of integers to one
4770 bigger integer and similar things we do not handle below. */
4771 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4776 t
= TREE_OPERAND (t
, 0);
4781 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4782 rhs
, &index
, &offset
, &size
, &by_ref
))
4784 for (v
= aggval
; v
; v
= v
->next
)
4785 if (v
->index
== index
4786 && v
->offset
== offset
)
4789 || v
->by_ref
!= by_ref
4790 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
4793 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
4794 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4796 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4797 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4798 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4799 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4800 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4805 fprintf (dump_file
, " const ");
4806 print_generic_expr (dump_file
, v
->value
, 0);
4807 fprintf (dump_file
, " can't be converted to type of ");
4808 print_generic_expr (dump_file
, rhs
, 0);
4809 fprintf (dump_file
, "\n");
4817 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4819 fprintf (dump_file
, "Modifying stmt:\n ");
4820 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4822 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4825 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4827 fprintf (dump_file
, "into:\n ");
4828 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4829 fprintf (dump_file
, "\n");
4832 something_changed
= true;
4833 if (maybe_clean_eh_stmt (stmt
)
4834 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
4838 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4839 free_parms_ainfo (parms_ainfo
, param_count
);
4840 descriptors
.release ();
4842 if (!something_changed
)
4844 else if (cfg_changed
)
4845 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4847 return TODO_update_ssa_only_virtuals
;