1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
29 #include "gimple-expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
43 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
59 #include "ipa-utils.h"
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
64 struct param_analysis_info
/* NOTE(review): this declaration is line-mangled in this view — the opening
   brace and closing "};" are missing.  Restore from a pristine checkout
   before editing the code itself.  */
/* Cached verdicts: set to true by parm_preserved_before_stmt_p,
   parm_ref_data_preserved_p and parm_ref_data_pass_through_p respectively
   once a clobbering vdef has been seen, so later queries return early.  */
66 bool parm_modified
, ref_modified
, pt_modified
;
/* Bitmaps of statements already visited, handed to walk_aliased_vdefs as its
   last argument so the same statements are not re-walked.  */
67 bitmap parm_visited_statements
, pt_visited_statements
;
70 /* Vector where the parameter infos are actually stored. */
71 vec
<ipa_node_params
> ipa_node_params_vector
;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
74 /* Vector where the per-call-graph-edge argument infos are actually stored.
   (The previous comment here was a copy-paste of the one for
   ipa_node_params_vector above.) */
75 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
77 /* Holders of ipa cgraph hooks: */
/* Opaque handles returned when the hooks are registered; kept so the hooks
   can be removed again when IPA structures are freed.  */
78 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
79 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
80 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
81 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
82 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
/* NOTE(review): mangled in this view — braces and at least one member
   (the reference count whose comment starts below) are missing.  */
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge
*cs
;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc
*next_duplicate
;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
96 /* Allocation pool for reference descriptions. */
/* Lazily created by ipa_set_jf_constant on first use.  */
98 static alloc_pool ipa_refdesc_pool
;
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
106 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
107 struct cl_optimization
*os
;
111 os
= TREE_OPTIMIZATION (fs_opts
);
112 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
115 /* Return index of the formal whose tree is PTREE in function which corresponds
119 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
123 count
= descriptors
.length ();
124 for (i
= 0; i
< count
; i
++)
125 if (descriptors
[i
].decl
== ptree
)
131 /* Return index of the formal whose tree is PTREE in function which corresponds
135 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
137 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
140 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
144 ipa_populate_param_decls (struct cgraph_node
*node
,
145 vec
<ipa_param_descriptor
> &descriptors
)
153 gcc_assert (gimple_has_body_p (fndecl
));
154 fnargs
= DECL_ARGUMENTS (fndecl
);
156 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
158 descriptors
[param_num
].decl
= parm
;
159 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params (tree fndecl
)
171 gcc_assert (gimple_has_body_p (fndecl
));
173 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
179 /* Return the declaration of Ith formal parameter of the function corresponding
180 to INFO. Note there is no setter function as this array is built just once
181 using ipa_initialize_node_params. */
184 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
186 fprintf (file
, "param #%i", i
);
187 if (info
->descriptors
[i
].decl
)
190 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
198 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
200 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
202 if (!info
->descriptors
.exists () && param_count
)
203 info
->descriptors
.safe_grow_cleared (param_count
);
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
211 ipa_initialize_node_params (struct cgraph_node
*node
)
213 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
215 if (!info
->descriptors
.exists ())
217 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
218 ipa_populate_param_decls (node
, info
->descriptors
);
222 /* Print the jump functions associated with call graph edge CS to file F. */
/* NOTE(review): this function's text is line-mangled in this view and several
   interior lines (braces, newline-printing fprintf calls, loop variable
   declarations) are missing.  Do not edit from this copy — restore it from a
   pristine checkout.  Visible logic: for each argument of CS it prints the
   jump function kind (UNKNOWN / KNOWN TYPE / CONST / PASS THROUGH / ANCESTOR)
   and, when present, the aggregate jump-function items.  */
225 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
229 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
230 for (i
= 0; i
< count
; i
++)
232 struct ipa_jump_func
*jump_func
;
233 enum jump_func_type type
;
235 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
236 type
= jump_func
->type
;
238 fprintf (f
, "       param %d: ", i
);
239 if (type
== IPA_JF_UNKNOWN
)
240 fprintf (f
, "UNKNOWN\n");
241 else if (type
== IPA_JF_KNOWN_TYPE
)
243 fprintf (f
, "KNOWN TYPE: base ");
244 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
245 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
246 jump_func
->value
.known_type
.offset
);
247 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
250 else if (type
== IPA_JF_CONST
)
252 tree val
= jump_func
->value
.constant
.value
;
253 fprintf (f
, "CONST: ");
254 print_generic_expr (f
, val
, 0);
255 if (TREE_CODE (val
) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
259 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
264 else if (type
== IPA_JF_PASS_THROUGH
)
266 fprintf (f
, "PASS THROUGH: ");
267 fprintf (f
, "%d, op %s",
268 jump_func
->value
.pass_through
.formal_id
,
269 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
270 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
273 print_generic_expr (f
,
274 jump_func
->value
.pass_through
.operand
, 0);
276 if (jump_func
->value
.pass_through
.agg_preserved
)
277 fprintf (f
, ", agg_preserved");
278 if (jump_func
->value
.pass_through
.type_preserved
)
279 fprintf (f
, ", type_preserved");
282 else if (type
== IPA_JF_ANCESTOR
)
284 fprintf (f
, "ANCESTOR: ");
285 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
286 jump_func
->value
.ancestor
.formal_id
,
287 jump_func
->value
.ancestor
.offset
);
288 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
289 if (jump_func
->value
.ancestor
.agg_preserved
)
290 fprintf (f
, ", agg_preserved");
291 if (jump_func
->value
.ancestor
.type_preserved
)
292 fprintf (f
, ", type_preserved");
296 if (jump_func
->agg
.items
)
298 struct ipa_agg_jf_item
*item
;
301 fprintf (f
, "         Aggregate passed by %s:\n",
302 jump_func
->agg
.by_ref
? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
305 fprintf (f
, "           offset: " HOST_WIDE_INT_PRINT_DEC
", ",
307 if (TYPE_P (item
->value
))
308 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
309 tree_to_uhwi (TYPE_SIZE (item
->value
)));
312 fprintf (f
, "cst: ");
313 print_generic_expr (f
, item
->value
, 0);
322 /* Print the jump functions of all arguments on all call graph edges going from
/* NOTE(review): line-mangled in this view with interior lines missing (the
   rest of this header comment, braces, `continue' statements and some fprintf
   arguments).  Restore from a pristine checkout before editing.  Visible
   logic: prints jump functions for every direct callee edge of NODE, then
   describes every indirect call site (aggregate-based or polymorphic/simple)
   and its jump functions.  */
326 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
328 struct cgraph_edge
*cs
;
330 fprintf (f
, "  Jump functions of caller  %s/%i:\n", node
->name (),
332 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
334 if (!ipa_edge_args_info_available_for_edge_p (cs
))
337 fprintf (f
, "    callsite  %s/%i -> %s/%i : \n",
338 xstrdup (node
->name ()), node
->order
,
339 xstrdup (cs
->callee
->name ()),
341 ipa_print_node_jump_functions_for_edge (f
, cs
);
344 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
346 struct cgraph_indirect_call_info
*ii
;
347 if (!ipa_edge_args_info_available_for_edge_p (cs
))
350 ii
= cs
->indirect_info
;
351 if (ii
->agg_contents
)
352 fprintf (f
, "    indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
354 ii
->member_ptr
? "member ptr" : "aggregate",
355 ii
->param_index
, ii
->offset
,
356 ii
->by_ref
? "by reference" : "by_value");
358 fprintf (f
, "    indirect %s callsite, calling param %i, "
359 "offset " HOST_WIDE_INT_PRINT_DEC
,
360 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
,
365 fprintf (f
, ", for stmt ");
366 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
370 ipa_print_node_jump_functions_for_edge (f
, cs
);
374 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
377 ipa_print_all_jump_functions (FILE *f
)
379 struct cgraph_node
*node
;
381 fprintf (f
, "\nJump functions:\n");
382 FOR_EACH_FUNCTION (node
)
384 ipa_print_node_jump_functions (f
, node
);
388 /* Set JFUNC to be a known type jump function. */
391 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
392 tree base_type
, tree component_type
)
394 gcc_assert (TREE_CODE (component_type
) == RECORD_TYPE
395 && TYPE_BINFO (component_type
));
396 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
397 jfunc
->value
.known_type
.offset
= offset
,
398 jfunc
->value
.known_type
.base_type
= base_type
;
399 jfunc
->value
.known_type
.component_type
= component_type
;
400 gcc_assert (component_type
);
403 /* Set JFUNC to be a copy of another jmp (to be used by jump function
404 combination code). The two functions will share their rdesc. */
407 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
408 struct ipa_jump_func
*src
)
411 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
412 dst
->type
= IPA_JF_CONST
;
413 dst
->value
.constant
= src
->value
.constant
;
416 /* Set JFUNC to be a constant jmp function. */
419 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
420 struct cgraph_edge
*cs
)
422 constant
= unshare_expr (constant
);
423 if (constant
&& EXPR_P (constant
))
424 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
425 jfunc
->type
= IPA_JF_CONST
;
426 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
428 if (TREE_CODE (constant
) == ADDR_EXPR
429 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
431 struct ipa_cst_ref_desc
*rdesc
;
432 if (!ipa_refdesc_pool
)
433 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
434 sizeof (struct ipa_cst_ref_desc
), 32);
436 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
438 rdesc
->next_duplicate
= NULL
;
440 jfunc
->value
.constant
.rdesc
= rdesc
;
443 jfunc
->value
.constant
.rdesc
= NULL
;
446 /* Set JFUNC to be a simple pass-through jump function. */
448 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
449 bool agg_preserved
, bool type_preserved
)
451 jfunc
->type
= IPA_JF_PASS_THROUGH
;
452 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
453 jfunc
->value
.pass_through
.formal_id
= formal_id
;
454 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
455 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
456 jfunc
->value
.pass_through
.type_preserved
= type_preserved
;
459 /* Set JFUNC to be an arithmetic pass through jump function. */
462 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
463 tree operand
, enum tree_code operation
)
465 jfunc
->type
= IPA_JF_PASS_THROUGH
;
466 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
467 jfunc
->value
.pass_through
.formal_id
= formal_id
;
468 jfunc
->value
.pass_through
.operation
= operation
;
469 jfunc
->value
.pass_through
.agg_preserved
= false;
470 jfunc
->value
.pass_through
.type_preserved
= false;
473 /* Set JFUNC to be an ancestor jump function. */
476 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
477 tree type
, int formal_id
, bool agg_preserved
,
480 jfunc
->type
= IPA_JF_ANCESTOR
;
481 jfunc
->value
.ancestor
.formal_id
= formal_id
;
482 jfunc
->value
.ancestor
.offset
= offset
;
483 jfunc
->value
.ancestor
.type
= type
;
484 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
485 jfunc
->value
.ancestor
.type_preserved
= type_preserved
;
488 /* Extract the acual BINFO being described by JFUNC which must be a known type
492 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
494 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
497 return get_binfo_at_offset (base_binfo
,
498 jfunc
->value
.known_type
.offset
,
499 jfunc
->value
.known_type
.component_type
);
502 /* Structure to be passed in between detect_type_change and
503 check_stmt_for_type_change. */
505 struct type_change_info
/* NOTE(review): mangled in this view — braces, the end of two member
   comments and at least one member declaration are missing.  In particular a
   `tree object' member must exist: it is written in detect_type_change
   (tci.object) and read in extr_type_from_vtbl_ptr_store (tci->object).  */
507 /* Offset into the object where there is the virtual method pointer we are
509 HOST_WIDE_INT offset
;
510 /* The declaration or SSA_NAME pointer of the base that we are checking for
513 /* If we actually can tell the type that the object has changed to, it is
514 stored in this field. Otherwise it remains NULL_TREE. */
515 tree known_current_type
;
516 /* Set to true if dynamic type change has been detected. */
517 bool type_maybe_changed
;
518 /* Set to true if multiple types have been encountered. known_current_type
519 must be disregarded in that case. */
520 bool multiple_types_encountered
;
523 /* Return true if STMT can modify a virtual method table pointer.
525 This function makes special assumptions about both constructors and
526 destructors which are all the functions that are allowed to alter the VMT
527 pointers. It assumes that destructors begin with assignment into all VMT
528 pointers and that constructors essentially look in the following way:
530 1) The very first thing they do is that they call constructors of ancestor
531 sub-objects that have them.
533 2) Then VMT pointers of this and all its ancestors is set to new values
534 corresponding to the type corresponding to the constructor.
536 3) Only afterwards, other stuff such as constructor of member sub-objects
537 and the code written by the user is run. Only this may include calling
538 virtual functions, directly or indirectly.
540 There is no way to call a constructor of an ancestor sub-object in any
543 This means that we do not have to care whether constructors get the correct
544 type information because they will always change it (in fact, if we define
545 the type to be given by the VMT pointer, it is undefined).
547 The most important fact to derive from the above is that if, for some
548 statement in the section 3, we try to detect whether the dynamic type has
549 changed, we can safely ignore all calls as we examine the function body
550 backwards until we reach statements in section 2 because these calls cannot
551 be ancestor constructors or destructors (if the input is not bogus) and so
552 do not change the dynamic type (this holds true only for automatically
553 allocated objects but at the moment we devirtualize only these). We then
554 must detect that statements in section 2 change the dynamic type and can try
555 to derive the new type. That is enough and we can stop, we will never see
556 the calls into constructors of sub-objects in this code. Therefore we can
557 safely ignore all call statements that we traverse.
561 stmt_may_be_vtbl_ptr_store (gimple stmt
)
563 if (is_gimple_call (stmt
))
565 else if (gimple_clobber_p (stmt
))
567 else if (is_gimple_assign (stmt
))
569 tree lhs
= gimple_assign_lhs (stmt
);
571 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
573 if (flag_strict_aliasing
574 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
577 if (TREE_CODE (lhs
) == COMPONENT_REF
578 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
580 /* In the future we might want to use get_base_ref_and_offset to find
581 if there is a field corresponding to the offset and if so, proceed
582 almost like if it was a component ref. */
588 /* If STMT can be proved to be an assignment to the virtual method table
589 pointer of ANALYZED_OBJ and the type associated with the new table
590 identified, return the type. Otherwise return NULL_TREE. */
593 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
595 HOST_WIDE_INT offset
, size
, max_size
;
596 tree lhs
, rhs
, base
, binfo
;
598 if (!gimple_assign_single_p (stmt
))
601 lhs
= gimple_assign_lhs (stmt
);
602 rhs
= gimple_assign_rhs1 (stmt
);
603 if (TREE_CODE (lhs
) != COMPONENT_REF
604 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
607 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
608 if (offset
!= tci
->offset
609 || size
!= POINTER_SIZE
610 || max_size
!= POINTER_SIZE
)
612 if (TREE_CODE (base
) == MEM_REF
)
614 if (TREE_CODE (tci
->object
) != MEM_REF
615 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
616 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
617 TREE_OPERAND (base
, 1)))
620 else if (tci
->object
!= base
)
623 binfo
= vtable_pointer_value_to_binfo (rhs
);
625 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
626 base of outer type. In this case we would need to either
627 work on binfos or translate it back to outer type and offset.
628 KNOWN_TYPE jump functions are not ready for that, yet. */
629 if (!binfo
|| TYPE_BINFO (BINFO_TYPE (binfo
)) != binfo
)
632 return BINFO_TYPE (binfo
);
635 /* Callback of walk_aliased_vdefs and a helper function for
636 detect_type_change to check whether a particular statement may modify
637 the virtual table pointer, and if possible also determine the new type of
638 the (sub-)object. It stores its result into DATA, which points to a
639 type_change_info structure. */
642 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
644 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
645 struct type_change_info
*tci
= (struct type_change_info
*) data
;
647 if (stmt_may_be_vtbl_ptr_store (stmt
))
650 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
651 if (tci
->type_maybe_changed
652 && type
!= tci
->known_current_type
)
653 tci
->multiple_types_encountered
= true;
654 tci
->known_current_type
= type
;
655 tci
->type_maybe_changed
= true;
664 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
665 callsite CALL) by looking for assignments to its virtual table pointer. If
666 it is, return true and fill in the jump function JFUNC with relevant type
667 information or set it to unknown. ARG is the object itself (not a pointer
668 to it, unless dereferenced). BASE is the base of the memory access as
669 returned by get_ref_base_and_extent, as is the offset. */
/* NOTE(review): line-mangled in this view; interior lines are missing
   (return statements, the `ao' declaration, tci.offset initialization and the
   final argument of walk_aliased_vdefs among others).  Restore from a
   pristine checkout before editing.  Visible logic: bail out when
   devirtualization is off, the call has no vuse, or COMP_TYPE is not
   polymorphic; special-case the unchanged C++ THIS pointer; then walk the
   aliased vdefs backwards with check_stmt_for_type_change and set JFUNC to a
   known-type or unknown jump function accordingly.  */
672 detect_type_change (tree arg
, tree base
, tree comp_type
, gimple call
,
673 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
675 struct type_change_info tci
;
678 gcc_checking_assert (DECL_P (arg
)
679 || TREE_CODE (arg
) == MEM_REF
680 || handled_component_p (arg
));
681 /* Const calls cannot call virtual methods through VMT and so type changes do
683 if (!flag_devirtualize
|| !gimple_vuse (call
)
684 /* Be sure expected_type is polymorphic. */
686 || TREE_CODE (comp_type
) != RECORD_TYPE
687 || !TYPE_BINFO (comp_type
)
688 || !BINFO_VTABLE (TYPE_BINFO (comp_type
)))
691 /* C++ methods are not allowed to change THIS pointer unless they
692 are constructors or destructors. */
693 if (TREE_CODE (base
) == MEM_REF
694 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
695 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0))
696 && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base
, 0))) == PARM_DECL
697 && TREE_CODE (TREE_TYPE (current_function_decl
)) == METHOD_TYPE
698 && !DECL_CXX_CONSTRUCTOR_P (current_function_decl
)
699 && !DECL_CXX_DESTRUCTOR_P (current_function_decl
)
700 && (SSA_NAME_VAR (TREE_OPERAND (base
, 0))
701 == DECL_ARGUMENTS (current_function_decl
)))
704 ao_ref_init (&ao
, arg
);
707 ao
.size
= POINTER_SIZE
;
708 ao
.max_size
= ao
.size
;
711 tci
.object
= get_base_address (arg
);
712 tci
.known_current_type
= NULL_TREE
;
713 tci
.type_maybe_changed
= false;
714 tci
.multiple_types_encountered
= false;
716 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
718 if (!tci
.type_maybe_changed
)
721 if (!tci
.known_current_type
722 || tci
.multiple_types_encountered
724 jfunc
->type
= IPA_JF_UNKNOWN
;
726 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
731 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
732 SSA name (its dereference will become the base and the offset is assumed to
736 detect_type_change_ssa (tree arg
, tree comp_type
,
737 gimple call
, struct ipa_jump_func
*jfunc
)
739 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
740 if (!flag_devirtualize
741 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
744 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
745 build_int_cst (ptr_type_node
, 0));
747 return detect_type_change (arg
, arg
, comp_type
, call
, jfunc
, 0);
750 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
751 boolean variable pointed to by DATA. */
754 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
757 bool *b
= (bool *) data
;
762 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
763 a value known not to be modified in this function before reaching the
764 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
765 information about the parameter. */
768 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
769 gimple stmt
, tree parm_load
)
771 bool modified
= false;
772 bitmap
*visited_stmts
;
775 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
778 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
779 ao_ref_init (&refd
, parm_load
);
780 /* We can cache visited statements only when parm_ainfo is available and when
781 we are looking at a naked load of the whole parameter. */
782 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
783 visited_stmts
= NULL
;
785 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
786 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
788 if (parm_ainfo
&& modified
)
789 parm_ainfo
->parm_modified
= true;
793 /* If STMT is an assignment that loads a value from an parameter declaration,
794 return the index of the parameter in ipa_node_params which has not been
795 modified. Otherwise return -1. */
798 load_from_unmodified_param (vec
<ipa_param_descriptor
> descriptors
,
799 struct param_analysis_info
*parms_ainfo
,
805 if (!gimple_assign_single_p (stmt
))
808 op1
= gimple_assign_rhs1 (stmt
);
809 if (TREE_CODE (op1
) != PARM_DECL
)
812 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
814 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
821 /* Return true if memory reference REF loads data that are known to be
822 unmodified in this function before reaching statement STMT. PARM_AINFO, if
823 non-NULL, is a pointer to a structure containing temporary information about
827 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
828 gimple stmt
, tree ref
)
830 bool modified
= false;
833 gcc_checking_assert (gimple_vuse (stmt
));
834 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
837 ao_ref_init (&refd
, ref
);
838 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
840 if (parm_ainfo
&& modified
)
841 parm_ainfo
->ref_modified
= true;
845 /* Return true if the data pointed to by PARM is known to be unmodified in this
846 function before reaching call statement CALL into which it is passed.
847 PARM_AINFO is a pointer to a structure containing temporary information
851 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
852 gimple call
, tree parm
)
854 bool modified
= false;
857 /* It's unnecessary to calculate anything about memory contnets for a const
858 function because it is not goin to use it. But do not cache the result
859 either. Also, no such calculations for non-pointers. */
860 if (!gimple_vuse (call
)
861 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
864 if (parm_ainfo
->pt_modified
)
867 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
868 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
869 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
871 parm_ainfo
->pt_modified
= true;
875 /* Return true if we can prove that OP is a memory reference loading unmodified
876 data from an aggregate passed as a parameter and if the aggregate is passed
877 by reference, that the alias type of the load corresponds to the type of the
878 formal parameter (so that we can rely on this type for TBAA in callers).
879 INFO and PARMS_AINFO describe parameters of the current function (but the
880 latter can be NULL), STMT is the load statement. If function returns true,
881 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
882 within the aggregate and whether it is a load from a value passed by
883 reference respectively. */
/* NOTE(review): line-mangled in this view; interior lines (braces, the
   DECL_P(base) branch, success paths assigning *INDEX_P/*SIZE_P/*BY_REF_P and
   the return statements) are missing.  Restore from a pristine checkout
   before editing.  Visible logic: decompose OP via get_ref_base_and_extent,
   then handle (1) a load directly from a by-value parameter and (2) a load
   through a MEM_REF of a pointer parameter, either a default-def SSA name or
   one reloaded from an unmodified parameter.  */
886 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor
> descriptors
,
887 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
888 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
889 HOST_WIDE_INT
*size_p
, bool *by_ref_p
)
892 HOST_WIDE_INT size
, max_size
;
893 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
895 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
900 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
902 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
914 if (TREE_CODE (base
) != MEM_REF
915 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
916 || !integer_zerop (TREE_OPERAND (base
, 1)))
919 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
921 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
922 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
926 /* This branch catches situations where a pointer parameter is not a
927 gimple register, for example:
929 void hip7(S*) (struct S * p)
931 void (*<T2e4>) (struct S *) D.1867;
941 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
942 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
946 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
958 /* Just like the previous function, just without the param_analysis_info
959 pointer, for users outside of this file. */
962 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
963 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
966 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
967 offset_p
, NULL
, by_ref_p
);
970 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
971 of an assignment statement STMT, try to determine whether we are actually
972 handling any of the following cases and construct an appropriate jump
973 function into JFUNC if so:
975 1) The passed value is loaded from a formal parameter which is not a gimple
976 register (most probably because it is addressable, the value has to be
977 scalar) and we can guarantee the value has not changed. This case can
978 therefore be described by a simple pass-through jump function. For example:
987 2) The passed value can be described by a simple arithmetic pass-through
994 D.2064_4 = a.1(D) + 4;
997 This case can also occur in combination of the previous one, e.g.:
1005 D.2064_4 = a.0_3 + 4;
1008 3) The passed value is an address of an object within another one (which
1009 also passed by reference). Such situations are described by an ancestor
1010 jump function and describe situations such as:
1012 B::foo() (struct B * const this)
1016 D.1845_2 = &this_1(D)->D.1748;
1019 INFO is the structure describing individual parameters access different
1020 stages of IPA optimizations. PARMS_AINFO contains the information that is
1021 only needed for intraprocedural analysis. */
/* NOTE(review): line-mangled in this view; interior lines (braces, early
   returns, the `index'/`param_type' declarations, several conditions) are
   missing.  Restore from a pristine checkout before editing.  Visible logic
   follows the three cases above: arithmetic pass-through, simple
   pass-through guarded by detect_type_change_ssa, and the ADDR_EXPR/MEM_REF
   ancestor case guarded by detect_type_change.  */
1024 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
1025 struct param_analysis_info
*parms_ainfo
,
1026 struct ipa_jump_func
*jfunc
,
1027 gimple call
, gimple stmt
, tree name
,
1030 HOST_WIDE_INT offset
, size
, max_size
;
1031 tree op1
, tc_ssa
, base
, ssa
;
1034 op1
= gimple_assign_rhs1 (stmt
);
1036 if (TREE_CODE (op1
) == SSA_NAME
)
1038 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1039 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1041 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
1042 SSA_NAME_DEF_STMT (op1
));
1047 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
1048 tc_ssa
= gimple_assign_lhs (stmt
);
1053 tree op2
= gimple_assign_rhs2 (stmt
);
1057 if (!is_gimple_ip_invariant (op2
)
1058 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1059 && !useless_type_conversion_p (TREE_TYPE (name
),
1063 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1064 gimple_assign_rhs_code (stmt
));
1066 else if (gimple_assign_single_p (stmt
))
1068 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1070 bool type_p
= false;
1072 if (param_type
&& POINTER_TYPE_P (param_type
))
1073 type_p
= !detect_type_change_ssa (tc_ssa
, TREE_TYPE (param_type
),
1075 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1076 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
1081 if (TREE_CODE (op1
) != ADDR_EXPR
)
1083 op1
= TREE_OPERAND (op1
, 0);
1084 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1086 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1087 if (TREE_CODE (base
) != MEM_REF
1088 /* If this is a varying address, punt. */
1090 || max_size
!= size
)
1092 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1093 ssa
= TREE_OPERAND (base
, 0);
1094 if (TREE_CODE (ssa
) != SSA_NAME
1095 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1099 /* Dynamic types are changed in constructors and destructors. */
1100 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1101 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1103 bool type_p
= !detect_type_change (op1
, base
, TREE_TYPE (param_type
),
1104 call
, jfunc
, offset
);
1105 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1106 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1107 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1108 call
, ssa
), type_p
);
1112 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1115 iftmp.1_3 = &obj_2(D)->D.1762;
1117 The base of the MEM_REF must be a default definition SSA NAME of a
1118 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1119 whole MEM_REF expression is returned and the offset calculated from any
1120 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1121 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
/* NOTE(review): this chunk is a lossy extraction -- brace, return and
   early-exit lines of the original are missing, so the control flow below is
   only partially visible.  Confirm against upstream before editing.  */
1124 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1126 HOST_WIDE_INT size
, max_size
;
1127 tree expr
, parm
, obj
;
/* Only a single (copy) assignment can produce the &obj->field pattern.  */
1129 if (!gimple_assign_single_p (assign
))
1131 expr
= gimple_assign_rhs1 (assign
);
1133 if (TREE_CODE (expr
) != ADDR_EXPR
)
/* Strip the ADDR_EXPR to reach the referenced object.  */
1135 expr
= TREE_OPERAND (expr
, 0);
1137 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1139 if (TREE_CODE (expr
) != MEM_REF
1140 /* If this is a varying address, punt. */
/* The MEM_REF base must be a default-definition SSA name of a PARM_DECL.  */
1145 parm
= TREE_OPERAND (expr
, 0);
1146 if (TREE_CODE (parm
) != SSA_NAME
1147 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1148 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
/* Fold the MEM_REF's own byte offset (scaled to bits) into *OFFSET.  */
1151 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1157 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1158 statement PHI, try to find out whether NAME is in fact a
1159 multiple-inheritance typecast from a descendant into an ancestor of a formal
1160 parameter and thus can be described by an ancestor jump function and if so,
1161 write the appropriate function into JFUNC.
1163 Essentially we want to match the following pattern:
1171 iftmp.1_3 = &obj_2(D)->D.1762;
1174 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1175 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
/* NOTE(review): lossy extraction -- return statements and braces are missing
   below; early exits for non-matching shapes are implied but not visible.  */
1179 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1180 struct param_analysis_info
*parms_ainfo
,
1181 struct ipa_jump_func
*jfunc
,
1182 gimple call
, gimple phi
, tree param_type
)
1184 HOST_WIDE_INT offset
;
1185 gimple assign
, cond
;
1186 basic_block phi_bb
, assign_bb
, cond_bb
;
1187 tree tmp
, parm
, expr
, obj
;
/* The pattern requires a two-argument PHI with exactly one zero argument
   (the NULL member-pointer arm).  */
1190 if (gimple_phi_num_args (phi
) != 2)
1193 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1194 tmp
= PHI_ARG_DEF (phi
, 0);
1195 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1196 tmp
= PHI_ARG_DEF (phi
, 1);
1199 if (TREE_CODE (tmp
) != SSA_NAME
1200 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1201 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1202 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1205 assign
= SSA_NAME_DEF_STMT (tmp
);
1206 assign_bb
= gimple_bb (assign
);
1207 if (!single_pred_p (assign_bb
))
1209 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1212 parm
= TREE_OPERAND (expr
, 0);
1213 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1214 gcc_assert (index
>= 0);
/* The guarding condition must be PARM != 0 in the single predecessor.  */
1216 cond_bb
= single_pred (assign_bb
);
1217 cond
= last_stmt (cond_bb
);
1219 || gimple_code (cond
) != GIMPLE_COND
1220 || gimple_cond_code (cond
) != NE_EXPR
1221 || gimple_cond_lhs (cond
) != parm
1222 || !integer_zerop (gimple_cond_rhs (cond
)))
/* Both PHI predecessors must be the assign block and the condition block.  */
1225 phi_bb
= gimple_bb (phi
);
1226 for (i
= 0; i
< 2; i
++)
1228 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1229 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1233 bool type_p
= false;
1234 if (param_type
&& POINTER_TYPE_P (param_type
))
1235 type_p
= !detect_type_change (obj
, expr
, TREE_TYPE (param_type
),
1236 call
, jfunc
, offset
);
1237 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1238 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1239 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1240 call
, parm
), type_p
);
1243 /* Given OP which is passed as an actual argument to a called function,
1244 determine if it is possible to construct a KNOWN_TYPE jump function for it
1245 and if so, create one and store it to JFUNC.
1246 EXPECTED_TYPE represents a type the argument should be in */
/* NOTE(review): lossy extraction -- the early `return`s after the guard
   conditions and the final argument of ipa_set_jf_known_type are missing.  */
1249 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1250 gimple call
, tree expected_type
)
1252 HOST_WIDE_INT offset
, size
, max_size
;
/* Only attempt this under -fdevirtualize, for &record arguments whose
   expected type is a polymorphic RECORD_TYPE (has a vtable).  */
1255 if (!flag_devirtualize
1256 || TREE_CODE (op
) != ADDR_EXPR
1257 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
1258 /* Be sure expected_type is polymorphic. */
1260 || TREE_CODE (expected_type
) != RECORD_TYPE
1261 || !TYPE_BINFO (expected_type
)
1262 || !BINFO_VTABLE (TYPE_BINFO (expected_type
)))
1265 op
= TREE_OPERAND (op
, 0);
1266 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1270 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1271 || is_global_var (base
))
/* If the dynamic type may change before the call, the known type is not
   usable.  */
1274 if (detect_type_change (op
, base
, expected_type
, call
, jfunc
, offset
))
1277 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
),
1281 /* Inspect the given TYPE and return true iff it has the same structure (the
1282 same number of fields of the same types) as a C++ member pointer. If
1283 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1284 corresponding fields there. */
/* NOTE(review): lossy extraction -- `return false`/`return true` lines and
   the stores through METHOD_PTR/DELTA are not visible here.  */
1287 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1291 if (TREE_CODE (type
) != RECORD_TYPE
)
/* First field: a pointer to METHOD_TYPE (the pfn).  */
1294 fld
= TYPE_FIELDS (type
);
1295 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1296 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1297 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
/* Second field: the delta.  NOTE(review): INTEGRAL_TYPE_P is applied to the
   FIELD_DECL itself rather than TREE_TYPE (fld) -- this looks like
   extraction loss or a bug; confirm against upstream ipa-prop.c.  */
1303 fld
= DECL_CHAIN (fld
);
1304 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1305 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
/* Any third field disqualifies the type.  */
1310 if (DECL_CHAIN (fld
))
1316 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1317 return the rhs of its defining statement. Otherwise return RHS as it
/* Follows chains of SSA copies until a non-SSA value, a default definition,
   or a non-copy definition is reached.  NOTE(review): the loop-exit `break`
   and final `return rhs` lines are missing from this extraction.  */
1321 get_ssa_def_if_simple_copy (tree rhs
)
1323 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1325 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1327 if (gimple_assign_single_p (def_stmt
))
1328 rhs
= gimple_assign_rhs1 (def_stmt
);
1335 /* Simple linked list, describing known contents of an aggregate before
/* List nodes are kept sorted by OFFSET (see determine_known_aggregate_parts),
   allocated with XALLOCA, so no explicit freeing is needed.  */
1338 struct ipa_known_agg_contents_list
1340 /* Offset and size of the described part of the aggregate. */
1341 HOST_WIDE_INT offset
, size
;
1342 /* Known constant value or NULL if the contents is known to be unknown. */
1344 /* Pointer to the next structure in the list. */
1345 struct ipa_known_agg_contents_list
*next
;
1348 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1349 in ARG is filled in with constant values. ARG can either be an aggregate
1350 expression or a pointer to an aggregate. ARG_TYPE is the type of the aggregate.
1351 JFUNC is the jump function into which the constants are subsequently stored. */
/* NOTE(review): lossy extraction -- braces, `break`/`return` lines and some
   assignments (e.g. to check_ref, by_ref, arg_base) are missing below.  */
1354 determine_known_aggregate_parts (gimple call
, tree arg
, tree arg_type
,
1355 struct ipa_jump_func
*jfunc
)
1357 struct ipa_known_agg_contents_list
*list
= NULL
;
1358 int item_count
= 0, const_count
= 0;
1359 HOST_WIDE_INT arg_offset
, arg_size
;
1360 gimple_stmt_iterator gsi
;
1362 bool check_ref
, by_ref
;
1365 /* The function operates in three stages. First, we prepare check_ref, r,
1366 arg_base and arg_offset based on what is actually passed as an actual
/* Pointer-to-aggregate argument: size comes from the pointed-to type.  */
1369 if (POINTER_TYPE_P (arg_type
))
1372 if (TREE_CODE (arg
) == SSA_NAME
)
1375 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type
))))
1380 type_size
= TYPE_SIZE (TREE_TYPE (arg_type
));
1381 arg_size
= tree_to_uhwi (type_size
);
1382 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1384 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1386 HOST_WIDE_INT arg_max_size
;
1388 arg
= TREE_OPERAND (arg
, 0);
1389 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
/* A varying address (max_size != size) cannot be tracked -- bail out.  */
1391 if (arg_max_size
== -1
1392 || arg_max_size
!= arg_size
1395 if (DECL_P (arg_base
))
1399 size
= build_int_cst (integer_type_node
, arg_size
);
1400 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
/* Aggregate passed by value.  */
1410 HOST_WIDE_INT arg_max_size
;
1412 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1416 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1418 if (arg_max_size
== -1
1419 || arg_max_size
!= arg_size
1423 ao_ref_init (&r
, arg
);
1426 /* Second stage walks back the BB, looks at individual statements and as long
1427 as it is confident of how the statements affect contents of the
1428 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1430 gsi
= gsi_for_stmt (call
);
1432 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1434 struct ipa_known_agg_contents_list
*n
, **p
;
1435 gimple stmt
= gsi_stmt (gsi
);
1436 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1437 tree lhs
, rhs
, lhs_base
;
1438 bool partial_overlap
;
/* Statements that cannot clobber the tracked memory are skipped.  */
1440 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1442 if (!gimple_assign_single_p (stmt
))
1445 lhs
= gimple_assign_lhs (stmt
);
1446 rhs
= gimple_assign_rhs1 (stmt
);
/* Bit-field stores cannot be represented as aggregate items.  */
1447 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1448 || TREE_CODE (lhs
) == BIT_FIELD_REF
1449 || contains_bitfld_component_ref_p (lhs
))
1452 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
/* Reject varying-size stores and stores that straddle either boundary of
   the tracked region.  */
1454 if (lhs_max_size
== -1
1455 || lhs_max_size
!= lhs_size
1456 || (lhs_offset
< arg_offset
1457 && lhs_offset
+ lhs_size
> arg_offset
)
1458 || (lhs_offset
< arg_offset
+ arg_size
1459 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
1464 if (TREE_CODE (lhs_base
) != MEM_REF
1465 || TREE_OPERAND (lhs_base
, 0) != arg_base
1466 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1469 else if (lhs_base
!= arg_base
)
1471 if (DECL_P (lhs_base
))
/* Stores entirely outside the tracked region are irrelevant.  */
1477 if (lhs_offset
+ lhs_size
< arg_offset
1478 || lhs_offset
>= (arg_offset
+ arg_size
))
/* Walk the sorted list to the insertion point, detecting overlap.  */
1481 partial_overlap
= false;
1483 while (*p
&& (*p
)->offset
< lhs_offset
)
1485 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1487 partial_overlap
= true;
1492 if (partial_overlap
)
1494 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1496 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1497 /* We already know this value is subsequently overwritten with
1501 /* Otherwise this is a partial overlap which we cannot
/* Record the store; non-invariant values are kept as NULL "unknown"
   entries so later (earlier in BB order) stores do not resurrect them.  */
1506 rhs
= get_ssa_def_if_simple_copy (rhs
);
1507 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1509 n
->offset
= lhs_offset
;
1510 if (is_gimple_ip_invariant (rhs
))
1516 n
->constant
= NULL_TREE
;
/* Cap the walk by PARAM_IPA_MAX_AGG_ITEMS to bound compile time.  */
1521 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1522 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1526 /* Third stage just goes over the list and creates an appropriate vector of
1527 ipa_agg_jf_item structures out of it, of course only if there are
1528 any known constants to begin with. */
1532 jfunc
->agg
.by_ref
= by_ref
;
1533 vec_alloc (jfunc
->agg
.items
, const_count
);
1538 struct ipa_agg_jf_item item
;
1539 item
.offset
= list
->offset
- arg_offset
;
1540 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1541 item
.value
= unshare_expr_without_location (list
->constant
);
1542 jfunc
->agg
.items
->quick_push (item
);
/* Return the declared type of the I-th parameter of the callee of edge E,
   looked up first in TYPE_ARG_TYPES of the callee's (or call statement's)
   function type, then falling back to DECL_ARGUMENTS.  NOTE(review): lossy
   extraction -- loop bodies advancing T and the NULL fallbacks are missing. */
1550 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1553 tree type
= (e
->callee
1554 ? TREE_TYPE (e
->callee
->decl
)
1555 : gimple_call_fntype (e
->call_stmt
));
1556 tree t
= TYPE_ARG_TYPES (type
);
1558 for (n
= 0; n
< i
; n
++)
1565 return TREE_VALUE (t
);
/* Fall back to walking the callee's DECL_ARGUMENTS chain.  */
1568 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1569 for (n
= 0; n
< i
; n
++)
1576 return TREE_TYPE (t
);
1580 /* Compute jump function for all arguments of callsite CS and insert the
1581 information in the jump_functions array in the ipa_edge_args corresponding
1582 to this callsite. */
/* NOTE(review): lossy extraction -- early `return`s, braces and several
   trailing call arguments are missing below.  */
1585 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1586 struct cgraph_edge
*cs
)
1588 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1589 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1590 gimple call
= cs
->call_stmt
;
1591 int n
, arg_num
= gimple_call_num_args (call
);
/* Nothing to do for zero-argument calls or already-computed edges.  */
1593 if (arg_num
== 0 || args
->jump_functions
)
1595 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1597 if (gimple_call_internal_p (call
))
1599 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
/* Compute one jump function per actual argument.  */
1602 for (n
= 0; n
< arg_num
; n
++)
1604 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1605 tree arg
= gimple_call_arg (call
, n
);
1606 tree param_type
= ipa_get_callee_param_type (cs
, n
);
/* Interprocedural invariants become IPA_JF_CONST.  */
1608 if (is_gimple_ip_invariant (arg
))
1609 ipa_set_jf_constant (jfunc
, arg
, cs
);
1610 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1611 && TREE_CODE (arg
) == PARM_DECL
)
1613 int index
= ipa_get_param_decl_index (info
, arg
);
1615 gcc_assert (index
>=0);
1616 /* Aggregate passed by value, check for pass-through, otherwise we
1617 will attempt to fill in aggregate contents later in this
1619 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1621 ipa_set_jf_simple_pass_through (jfunc
, index
, false, false);
1625 else if (TREE_CODE (arg
) == SSA_NAME
)
/* A default-definition SSA name is the parameter itself -- a simple
   pass-through (possibly with aggregate/type preservation flags).  */
1627 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1629 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1633 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1635 if (param_type
&& POINTER_TYPE_P (param_type
))
1636 type_p
= !detect_type_change_ssa (arg
, TREE_TYPE (param_type
),
1640 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1641 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
,
/* Otherwise try the complex assign/PHI patterns.  */
1647 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1648 if (is_gimple_assign (stmt
))
1649 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1650 call
, stmt
, arg
, param_type
);
1651 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1652 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1653 call
, stmt
, param_type
);
1657 compute_known_type_jump_func (arg
, jfunc
, call
,
1659 && POINTER_TYPE_P (param_type
)
1660 ? TREE_TYPE (param_type
)
1663 /* If ARG is pointer, we can not use its type to determine the type of aggregate
1664 passed (because type conversions are ignored in gimple). Usually we can
1665 safely get type from function declaration, but in case of K&R prototypes or
1666 variadic functions we can try our luck with type of the pointer passed.
1667 TODO: Since we look for actual initialization of the memory object, we may better
1668 work out the type based on the memory stores we find. */
1670 param_type
= TREE_TYPE (arg
);
/* Only scan aggregate contents when the aggregate is not already known to
   be preserved by a pass-through or ancestor jump function.  */
1672 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1673 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1674 && (jfunc
->type
!= IPA_JF_ANCESTOR
1675 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1676 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1677 || POINTER_TYPE_P (param_type
)))
1678 determine_known_aggregate_parts (call
, arg
, param_type
, jfunc
);
1682 /* Compute jump functions for all edges - both direct and indirect - outgoing
1683 from NODE. Also count the actual arguments in the process. */
1686 ipa_compute_jump_functions (struct cgraph_node
*node
,
1687 struct param_analysis_info
*parms_ainfo
)
1689 struct cgraph_edge
*cs
;
/* Direct call edges first.  */
1691 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1693 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1695 /* We do not need to bother analyzing calls to unknown
1696 functions unless they may become known during lto/whopr. */
1697 if (!callee
->definition
&& !flag_lto
)
1699 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
/* Then all indirect call edges.  */
1702 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1703 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1706 /* If STMT looks like a statement loading a value from a member pointer formal
1707 parameter, return that parameter and store the offset of the field to
1708 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1709 might be clobbered). If USE_DELTA, then we look for a use of the delta
1710 field rather than the pfn. */
/* NOTE(review): lossy extraction -- NULL_TREE returns, the USE_DELTA field
   selection (assignment to FLD) and several braces are missing below.  */
1713 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1714 HOST_WIDE_INT
*offset_p
)
1716 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1718 if (!gimple_assign_single_p (stmt
))
1721 rhs
= gimple_assign_rhs1 (stmt
);
/* The load may be either REC.field (COMPONENT_REF) or MEM[&rec + off].  */
1722 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1724 ref_field
= TREE_OPERAND (rhs
, 1);
1725 rhs
= TREE_OPERAND (rhs
, 0);
1728 ref_field
= NULL_TREE
;
1729 if (TREE_CODE (rhs
) != MEM_REF
)
1731 rec
= TREE_OPERAND (rhs
, 0);
1732 if (TREE_CODE (rec
) != ADDR_EXPR
)
1734 rec
= TREE_OPERAND (rec
, 0);
/* The record must be a PARM_DECL shaped like a C++ member pointer.  */
1735 if (TREE_CODE (rec
) != PARM_DECL
1736 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1738 ref_offset
= TREE_OPERAND (rhs
, 1);
1745 *offset_p
= int_bit_position (fld
);
/* COMPONENT_REF case: field must match; MEM_REF case: offset must match.  */
1749 if (integer_nonzerop (ref_offset
))
1751 return ref_field
== fld
? rec
: NULL_TREE
;
1754 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1758 /* Returns true iff T is an SSA_NAME defined by a statement. */
/* NOTE(review): the `return true`/`return false` lines are missing from
   this extraction.  */
1761 ipa_is_ssa_with_stmt_def (tree t
)
1763 if (TREE_CODE (t
) == SSA_NAME
1764 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1770 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1771 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1772 indirect call graph edge. */
/* NOTE(review): the final `return cs;` line is missing from this
   extraction.  */
1774 static struct cgraph_edge
*
1775 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1777 struct cgraph_edge
*cs
;
1779 cs
= cgraph_edge (node
, stmt
);
/* Record which formal parameter is called and reset aggregate/member-ptr
   flags; callers set them afterwards as appropriate.  */
1780 cs
->indirect_info
->param_index
= param_index
;
1781 cs
->indirect_info
->agg_contents
= 0;
1782 cs
->indirect_info
->member_ptr
= 0;
1786 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1787 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1788 intermediate information about each formal parameter. Currently it checks
1789 whether the call calls a pointer that is a formal parameter and if so, the
1790 parameter is marked with the called flag and an indirect call graph edge
1791 describing the call is created. This is very simple for ordinary pointers
1792 represented in SSA but not-so-nice when it comes to member pointers. The
1793 ugly part of this function does nothing more than trying to match the
1794 pattern of such a call. An example of such a pattern is the gimple dump
1795 below, the call is on the last line:
1798 f$__delta_5 = f.__delta;
1799 f$__pfn_24 = f.__pfn;
1803 f$__delta_5 = MEM[(struct *)&f];
1804 f$__pfn_24 = MEM[(struct *)&f + 4B];
1806 and a few lines below:
1809 D.2496_3 = (int) f$__pfn_24;
1810 D.2497_4 = D.2496_3 & 1;
1817 D.2500_7 = (unsigned int) f$__delta_5;
1818 D.2501_8 = &S + D.2500_7;
1819 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1820 D.2503_10 = *D.2502_9;
1821 D.2504_12 = f$__pfn_24 + -1;
1822 D.2505_13 = (unsigned int) D.2504_12;
1823 D.2506_14 = D.2503_10 + D.2505_13;
1824 D.2507_15 = *D.2506_14;
1825 iftmp.11_16 = (String:: *) D.2507_15;
1828 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1829 D.2500_19 = (unsigned int) f$__delta_5;
1830 D.2508_20 = &S + D.2500_19;
1831 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1833 Such patterns are results of simple calls to a member pointer:
1835 int doprinting (int (MyString::* f)(int) const)
1837 MyString S ("somestring");
1842 Moreover, the function also looks for called pointers loaded from aggregates
1843 passed by value or reference. */
/* NOTE(review): lossy extraction -- early `return`s, braces and several
   local declarations (index, def, n1/n2, d1/d2, branch, by_ref) are
   missing below.  */
1846 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1847 struct ipa_node_params
*info
,
1848 struct param_analysis_info
*parms_ainfo
,
1849 gimple call
, tree target
)
1854 tree rec
, rec2
, cond
;
1857 basic_block bb
, virt_bb
, join
;
1858 HOST_WIDE_INT offset
;
/* Simple case: the called SSA name is directly a formal parameter.  */
1861 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1863 tree var
= SSA_NAME_VAR (target
);
1864 index
= ipa_get_param_decl_index (info
, var
);
1866 ipa_note_param_call (node
, index
, call
);
/* Next case: the called pointer is loaded from an aggregate parameter.  */
1870 def
= SSA_NAME_DEF_STMT (target
);
1871 if (gimple_assign_single_p (def
)
1872 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1873 gimple_assign_rhs1 (def
), &index
, &offset
,
1876 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1877 if (cs
->indirect_info
->offset
!= offset
)
1878 cs
->indirect_info
->outer_type
= NULL
;
1879 cs
->indirect_info
->offset
= offset
;
1880 cs
->indirect_info
->agg_contents
= 1;
1881 cs
->indirect_info
->by_ref
= by_ref
;
1885 /* Now we need to try to match the complex pattern of calling a member
1887 if (gimple_code (def
) != GIMPLE_PHI
1888 || gimple_phi_num_args (def
) != 2
1889 || !POINTER_TYPE_P (TREE_TYPE (target
))
1890 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1893 /* First, we need to check whether one of these is a load from a member
1894 pointer that is a parameter to this function. */
1895 n1
= PHI_ARG_DEF (def
, 0);
1896 n2
= PHI_ARG_DEF (def
, 1);
1897 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1899 d1
= SSA_NAME_DEF_STMT (n1
);
1900 d2
= SSA_NAME_DEF_STMT (n2
);
1902 join
= gimple_bb (def
);
/* Exactly one PHI arm must be the pfn load; the other arm's block is the
   virtual-thunk adjustment block.  */
1903 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1905 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1908 bb
= EDGE_PRED (join
, 0)->src
;
1909 virt_bb
= gimple_bb (d2
);
1911 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1913 bb
= EDGE_PRED (join
, 1)->src
;
1914 virt_bb
= gimple_bb (d1
);
1919 /* Second, we need to check that the basic blocks are laid out in the way
1920 corresponding to the pattern. */
1922 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1923 || single_pred (virt_bb
) != bb
1924 || single_succ (virt_bb
) != join
)
1927 /* Third, let's see that the branching is done depending on the least
1928 significant bit of the pfn. */
1930 branch
= last_stmt (bb
);
1931 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1934 if ((gimple_cond_code (branch
) != NE_EXPR
1935 && gimple_cond_code (branch
) != EQ_EXPR
)
1936 || !integer_zerop (gimple_cond_rhs (branch
)))
1939 cond
= gimple_cond_lhs (branch
);
1940 if (!ipa_is_ssa_with_stmt_def (cond
))
1943 def
= SSA_NAME_DEF_STMT (cond
);
/* The condition must compute (pfn & 1), possibly through a conversion.  */
1944 if (!is_gimple_assign (def
)
1945 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1946 || !integer_onep (gimple_assign_rhs2 (def
)))
1949 cond
= gimple_assign_rhs1 (def
);
1950 if (!ipa_is_ssa_with_stmt_def (cond
))
1953 def
= SSA_NAME_DEF_STMT (cond
);
1955 if (is_gimple_assign (def
)
1956 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1958 cond
= gimple_assign_rhs1 (def
);
1959 if (!ipa_is_ssa_with_stmt_def (cond
))
1961 def
= SSA_NAME_DEF_STMT (cond
);
/* The tested bit lives in pfn or delta depending on the target ABI.  */
1964 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1965 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1966 == ptrmemfunc_vbit_in_delta
),
1971 index
= ipa_get_param_decl_index (info
, rec
);
1973 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1975 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1976 if (cs
->indirect_info
->offset
!= offset
)
1977 cs
->indirect_info
->outer_type
= NULL
;
1978 cs
->indirect_info
->offset
= offset
;
1979 cs
->indirect_info
->agg_contents
= 1;
1980 cs
->indirect_info
->member_ptr
= 1;
1986 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1987 object referenced in the expression is a formal parameter of the caller
1988 (described by INFO), create a call note for the statement. */
/* NOTE(review): lossy extraction -- the early `return`s after the guard
   conditions and some declarations (index, expr) are missing below.  */
1991 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1992 struct ipa_node_params
*info
, gimple call
,
1995 struct cgraph_edge
*cs
;
1996 struct cgraph_indirect_call_info
*ii
;
1997 struct ipa_jump_func jfunc
;
1998 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
2000 HOST_WIDE_INT anc_offset
;
2002 if (!flag_devirtualize
)
2005 if (TREE_CODE (obj
) != SSA_NAME
)
/* Simple case: OBJ is a default definition of a PARM_DECL.  */
2008 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
2010 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
2014 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
2015 gcc_assert (index
>= 0);
2016 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
/* Ancestor case: OBJ is &parm->field computed in the defining statement.  */
2022 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
2025 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2028 index
= ipa_get_param_decl_index (info
,
2029 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2030 gcc_assert (index
>= 0);
2031 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2032 call
, &jfunc
, anc_offset
))
/* Record the polymorphic indirect call on the edge.  */
2036 cs
= ipa_note_param_call (node
, index
, call
);
2037 ii
= cs
->indirect_info
;
2038 ii
->offset
= anc_offset
;
2039 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2040 ii
->otr_type
= obj_type_ref_class (target
);
2041 ii
->polymorphic
= 1;
2044 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2045 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2046 containing intermediate information about each formal parameter. */
/* Dispatches to the indirect-call or virtual-call analyzer depending on the
   shape of the call target.  NOTE(review): early `return`s are missing from
   this extraction.  */
2049 ipa_analyze_call_uses (struct cgraph_node
*node
,
2050 struct ipa_node_params
*info
,
2051 struct param_analysis_info
*parms_ainfo
, gimple call
)
2053 tree target
= gimple_call_fn (call
);
2054 struct cgraph_edge
*cs
;
2057 || (TREE_CODE (target
) != SSA_NAME
2058 && !virtual_method_call_p (target
)))
2061 /* If we previously turned the call into a direct call, there is
2062 no need to analyze. */
2063 cs
= cgraph_edge (node
, call
);
2064 if (cs
&& !cs
->indirect_unknown_callee
)
2066 if (TREE_CODE (target
) == SSA_NAME
)
2067 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
2068 else if (virtual_method_call_p (target
))
2069 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
2073 /* Analyze the call statement STMT with respect to formal parameters (described
2074 in INFO) of caller given by NODE. Currently it only checks whether formal
2075 parameters are called. PARMS_AINFO is a pointer to a vector containing
2076 intermediate information about each formal parameter. */
/* Thin wrapper: only call statements carry parameter-use information of
   interest here.  */
2079 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
2080 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
2082 if (is_gimple_call (stmt
))
2083 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
2086 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2087 If OP is a parameter declaration, mark it as used in the info structure
/* The first and third parameters (statement and ref kind) are intentionally
   unnamed -- only the base address of OP matters here.  NOTE(review): the
   final `return false;` line is missing from this extraction.  */
2091 visit_ref_for_mod_analysis (gimple
, tree op
, tree
, void *data
)
2093 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
2095 op
= get_base_address (op
);
2097 && TREE_CODE (op
) == PARM_DECL
)
2099 int index
= ipa_get_param_decl_index (info
, op
);
2100 gcc_assert (index
>= 0);
2101 ipa_set_param_used (info
, index
, true);
2107 /* Scan the function body of NODE and inspect the uses of formal parameters.
2108 Store the findings in various structures of the associated ipa_node_params
2109 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2110 vector containing intermediate information about each formal parameter. */
/* NOTE(review): lossy extraction -- braces, `return`s, the declaration of
   the loop index I and basic_block BB, and some `continue`s are missing.  */
2113 ipa_analyze_params_uses (struct cgraph_node
*node
,
2114 struct param_analysis_info
*parms_ainfo
)
2116 tree decl
= node
->decl
;
2118 struct function
*func
;
2119 gimple_stmt_iterator gsi
;
2120 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
/* Skip nodes with no parameters or already-analyzed nodes.  */
2123 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
2126 info
->uses_analysis_done
= 1;
/* When function-specific options forbid the analysis, conservatively mark
   every parameter as used with undescribed uses.  */
2127 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2129 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2131 ipa_set_param_used (info
, i
, true);
2132 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2137 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2139 tree parm
= ipa_get_param (info
, i
);
2140 int controlled_uses
= 0;
2142 /* For SSA regs see if parameter is used. For non-SSA we compute
2143 the flag during modification analysis. */
2144 if (is_gimple_reg (parm
))
2146 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2148 if (ddef
&& !has_zero_uses (ddef
))
2150 imm_use_iterator imm_iter
;
2151 use_operand_p use_p
;
2153 ipa_set_param_used (info
, i
, true);
/* Any non-call, non-debug use makes the parameter's uses undescribed.  */
2154 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2155 if (!is_gimple_call (USE_STMT (use_p
)))
2157 if (!is_gimple_debug (USE_STMT (use_p
)))
2159 controlled_uses
= IPA_UNDESCRIBED_USE
;
2167 controlled_uses
= 0;
2170 controlled_uses
= IPA_UNDESCRIBED_USE
;
2171 ipa_set_controlled_uses (info
, i
, controlled_uses
);
/* Walk every statement and PHI of every basic block, analyzing call uses
   and load/store/address references of parameters.  */
2174 func
= DECL_STRUCT_FUNCTION (decl
);
2175 FOR_EACH_BB_FN (bb
, func
)
2177 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2179 gimple stmt
= gsi_stmt (gsi
);
2181 if (is_gimple_debug (stmt
))
2184 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2185 walk_stmt_load_store_addr_ops (stmt
, info
,
2186 visit_ref_for_mod_analysis
,
2187 visit_ref_for_mod_analysis
,
2188 visit_ref_for_mod_analysis
);
2190 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2191 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2192 visit_ref_for_mod_analysis
,
2193 visit_ref_for_mod_analysis
,
2194 visit_ref_for_mod_analysis
);
2198 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
/* Releases the per-parameter visited-statement bitmaps; the array itself is
   alloca-allocated by the caller (see ipa_analyze_node) and needs no free. */
2201 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
2205 for (i
= 0; i
< param_count
; i
++)
2207 if (parms_ainfo
[i
].parm_visited_statements
)
2208 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2209 if (parms_ainfo
[i
].pt_visited_statements
)
2210 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2214 /* Initialize the array describing properties of formal parameters
2215 of NODE, analyze their uses and compute jump functions associated
2216 with actual arguments of calls from within NODE. */
/* NOTE(review): lossy extraction -- the declaration of PARAM_COUNT, an early
   return for already-analyzed nodes, and the matching pop_cfun () call are
   missing below; confirm against upstream.  */
2219 ipa_analyze_node (struct cgraph_node
*node
)
2221 struct ipa_node_params
*info
;
2222 struct param_analysis_info
*parms_ainfo
;
2225 ipa_check_create_node_params ();
2226 ipa_check_create_edge_args ();
2227 info
= IPA_NODE_REF (node
);
/* Switch to NODE's function context for the duration of the analysis.  */
2228 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2229 ipa_initialize_node_params (node
);
/* Scratch per-parameter info lives on the stack and is zero-initialized.  */
2231 param_count
= ipa_get_param_count (info
);
2232 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2233 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2235 ipa_analyze_params_uses (node
, parms_ainfo
);
2236 ipa_compute_jump_functions (node
, parms_ainfo
);
2238 free_parms_ainfo (parms_ainfo
, param_count
);
2242 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2243 attempt a type-based devirtualization. If successful, return the
2244 target function declaration, otherwise return NULL. */
/* NOTE(review): lossy extraction -- NULL returns, the binfo argument of
   gimple_get_virt_method_for_binfo and the final `return fndecl;` are
   missing below.  */
2247 ipa_intraprocedural_devirtualization (gimple call
)
2249 tree binfo
, token
, fndecl
;
2250 struct ipa_jump_func jfunc
;
2251 tree otr
= gimple_call_fn (call
);
/* Build a KNOWN_TYPE jump function for the OBJ_TYPE_REF object; failure
   leaves jfunc.type at IPA_JF_UNKNOWN.  */
2253 jfunc
.type
= IPA_JF_UNKNOWN
;
2254 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2255 call
, obj_type_ref_class (otr
));
2256 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2258 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
/* Look the virtual method up in the vtable by its token.  */
2261 token
= OBJ_TYPE_REF_TOKEN (otr
);
2262 fndecl
= gimple_get_virt_method_for_binfo (tree_to_uhwi (token
),
2264 #ifdef ENABLE_CHECKING
2266 gcc_assert (possible_polymorphic_call_target_p
2267 (otr
, cgraph_get_node (fndecl
)));
2272 /* Update the jump function DST when the call graph edge corresponding to SRC
2273 is being inlined, knowing that DST is of type ancestor and src of known
/* NOTE(review): lossy extraction -- the declaration of COMBINED_TYPE, a
   `return` after invalidating DST, and the trailing arguments of
   ipa_set_jf_known_type are missing below.  */
2277 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2278 struct ipa_jump_func
*dst
)
2280 HOST_WIDE_INT combined_offset
;
/* If the ancestor step does not preserve the dynamic type, the combined
   result is unknown.  */
2283 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2285 dst
->type
= IPA_JF_UNKNOWN
;
/* Otherwise compose the two offsets and keep the ancestor's type.  */
2289 combined_offset
= ipa_get_jf_known_type_offset (src
)
2290 + ipa_get_jf_ancestor_offset (dst
);
2291 combined_type
= ipa_get_jf_ancestor_type (dst
);
2293 ipa_set_jf_known_type (dst
, combined_offset
,
2294 ipa_get_jf_known_type_base_type (src
),
2298 /* Update the jump functions associated with call graph edge E when the call
2299 graph edge CS is being inlined, assuming that E->caller is already (possibly
2300 indirectly) inlined into CS->callee and that E has not been inlined. */
2303 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2304 struct cgraph_edge
*e
)
2306 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2307 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2308 int count
= ipa_get_cs_argument_count (args
);
2311 for (i
= 0; i
< count
; i
++)
2313 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2315 if (dst
->type
== IPA_JF_ANCESTOR
)
2317 struct ipa_jump_func
*src
;
2318 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2320 /* Variable number of arguments can cause havoc if we try to access
2321 one that does not exist in the inlined edge. So make sure we
2323 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2325 dst
->type
= IPA_JF_UNKNOWN
;
2329 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2332 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2334 struct ipa_agg_jf_item
*item
;
2337 /* Currently we do not produce clobber aggregate jump functions,
2338 replace with merging when we do. */
2339 gcc_assert (!dst
->agg
.items
);
2341 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2342 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2343 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2344 item
->offset
-= dst
->value
.ancestor
.offset
;
2347 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2348 combine_known_type_and_ancestor_jfs (src
, dst
);
2349 else if (src
->type
== IPA_JF_PASS_THROUGH
2350 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2352 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2353 dst
->value
.ancestor
.agg_preserved
&=
2354 src
->value
.pass_through
.agg_preserved
;
2355 dst
->value
.ancestor
.type_preserved
&=
2356 src
->value
.pass_through
.type_preserved
;
2358 else if (src
->type
== IPA_JF_ANCESTOR
)
2360 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2361 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2362 dst
->value
.ancestor
.agg_preserved
&=
2363 src
->value
.ancestor
.agg_preserved
;
2364 dst
->value
.ancestor
.type_preserved
&=
2365 src
->value
.ancestor
.type_preserved
;
2368 dst
->type
= IPA_JF_UNKNOWN
;
2370 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2372 struct ipa_jump_func
*src
;
2373 /* We must check range due to calls with variable number of arguments
2374 and we cannot combine jump functions with operations. */
2375 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2376 && (dst
->value
.pass_through
.formal_id
2377 < ipa_get_cs_argument_count (top
)))
2379 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2380 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2381 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2385 case IPA_JF_UNKNOWN
:
2386 dst
->type
= IPA_JF_UNKNOWN
;
2388 case IPA_JF_KNOWN_TYPE
:
2389 if (ipa_get_jf_pass_through_type_preserved (dst
))
2390 ipa_set_jf_known_type (dst
,
2391 ipa_get_jf_known_type_offset (src
),
2392 ipa_get_jf_known_type_base_type (src
),
2393 ipa_get_jf_known_type_base_type (src
));
2395 dst
->type
= IPA_JF_UNKNOWN
;
2398 ipa_set_jf_cst_copy (dst
, src
);
2401 case IPA_JF_PASS_THROUGH
:
2403 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2404 enum tree_code operation
;
2405 operation
= ipa_get_jf_pass_through_operation (src
);
2407 if (operation
== NOP_EXPR
)
2411 && ipa_get_jf_pass_through_agg_preserved (src
);
2412 type_p
= ipa_get_jf_pass_through_type_preserved (src
)
2413 && ipa_get_jf_pass_through_type_preserved (dst
);
2414 ipa_set_jf_simple_pass_through (dst
, formal_id
,
2419 tree operand
= ipa_get_jf_pass_through_operand (src
);
2420 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2425 case IPA_JF_ANCESTOR
:
2429 && ipa_get_jf_ancestor_agg_preserved (src
);
2430 type_p
= ipa_get_jf_ancestor_type_preserved (src
)
2431 && ipa_get_jf_pass_through_type_preserved (dst
);
2432 ipa_set_ancestor_jf (dst
,
2433 ipa_get_jf_ancestor_offset (src
),
2434 ipa_get_jf_ancestor_type (src
),
2435 ipa_get_jf_ancestor_formal_id (src
),
2444 && (dst_agg_p
|| !src
->agg
.by_ref
))
2446 /* Currently we do not produce clobber aggregate jump
2447 functions, replace with merging when we do. */
2448 gcc_assert (!dst
->agg
.items
);
2450 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2451 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2455 dst
->type
= IPA_JF_UNKNOWN
;
2460 /* If TARGET is an addr_expr of a function declaration, make it the destination
2461 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2463 struct cgraph_edge
*
2464 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2466 struct cgraph_node
*callee
;
2467 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2468 bool unreachable
= false;
2470 if (TREE_CODE (target
) == ADDR_EXPR
)
2471 target
= TREE_OPERAND (target
, 0);
2472 if (TREE_CODE (target
) != FUNCTION_DECL
)
2474 target
= canonicalize_constructor_val (target
, NULL
);
2475 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2477 if (ie
->indirect_info
->member_ptr
)
2478 /* Member pointer call that goes through a VMT lookup. */
2482 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2483 " in %s/%i, making it unreachable.\n",
2484 ie
->caller
->name (), ie
->caller
->order
);
2485 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2486 callee
= cgraph_get_create_node (target
);
2490 callee
= cgraph_get_node (target
);
2493 callee
= cgraph_get_node (target
);
2495 /* Because may-edges are not explicitely represented and vtable may be external,
2496 we may create the first reference to the object in the unit. */
2497 if (!callee
|| callee
->global
.inlined_to
)
2500 /* We are better to ensure we can refer to it.
2501 In the case of static functions we are out of luck, since we already
2502 removed its body. In the case of public functions we may or may
2503 not introduce the reference. */
2504 if (!canonicalize_constructor_val (target
, NULL
)
2505 || !TREE_PUBLIC (target
))
2508 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2509 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2510 xstrdup (ie
->caller
->name ()),
2512 xstrdup (ie
->callee
->name ()),
2516 callee
= cgraph_get_create_node (target
);
2518 ipa_check_create_node_params ();
2520 /* We can not make edges to inline clones. It is bug that someone removed
2521 the cgraph node too early. */
2522 gcc_assert (!callee
->global
.inlined_to
);
2524 if (dump_file
&& !unreachable
)
2526 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2527 "(%s/%i -> %s/%i), for stmt ",
2528 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2529 xstrdup (ie
->caller
->name ()),
2531 xstrdup (callee
->name ()),
2534 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2536 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2538 ie
= cgraph_make_edge_direct (ie
, callee
);
2539 es
= inline_edge_summary (ie
);
2540 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2541 - eni_size_weights
.call_cost
);
2542 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2543 - eni_time_weights
.call_cost
);
2548 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2549 return NULL if there is not any. BY_REF specifies whether the value has to
2550 be passed by reference or by value. */
2553 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2554 HOST_WIDE_INT offset
, bool by_ref
)
2556 struct ipa_agg_jf_item
*item
;
2559 if (by_ref
!= agg
->by_ref
)
2562 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2563 if (item
->offset
== offset
)
2565 /* Currently we do not have clobber values, return NULL for them once
2567 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2573 /* Remove a reference to SYMBOL from the list of references of a node given by
2574 reference description RDESC. Return true if the reference has been
2575 successfully found and removed. */
2578 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2580 struct ipa_ref
*to_del
;
2581 struct cgraph_edge
*origin
;
2586 to_del
= ipa_find_reference (origin
->caller
, symbol
,
2587 origin
->call_stmt
, origin
->lto_stmt_uid
);
2591 ipa_remove_reference (to_del
);
2593 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2594 xstrdup (origin
->caller
->name ()),
2595 origin
->caller
->order
, xstrdup (symbol
->name ()));
2599 /* If JFUNC has a reference description with refcount different from
2600 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2601 NULL. JFUNC must be a constant jump function. */
2603 static struct ipa_cst_ref_desc
*
2604 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2606 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2607 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2613 /* If the value of constant jump function JFUNC is an address of a function
2614 declaration, return the associated call graph node. Otherwise return
2617 static cgraph_node
*
2618 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2620 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2621 tree cst
= ipa_get_jf_constant (jfunc
);
2622 if (TREE_CODE (cst
) != ADDR_EXPR
2623 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2626 return cgraph_get_node (TREE_OPERAND (cst
, 0));
2630 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2631 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2632 the edge specified in the rdesc. Return false if either the symbol or the
2633 reference could not be found, otherwise return true. */
2636 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2638 struct ipa_cst_ref_desc
*rdesc
;
2639 if (jfunc
->type
== IPA_JF_CONST
2640 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2641 && --rdesc
->refcount
== 0)
2643 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2647 return remove_described_reference (symbol
, rdesc
);
2652 /* Try to find a destination for indirect edge IE that corresponds to a simple
2653 call or a call of a member function pointer and where the destination is a
2654 pointer formal parameter described by jump function JFUNC. If it can be
2655 determined, return the newly direct edge, otherwise return NULL.
2656 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2658 static struct cgraph_edge
*
2659 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2660 struct ipa_jump_func
*jfunc
,
2661 struct ipa_node_params
*new_root_info
)
2663 struct cgraph_edge
*cs
;
2665 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2667 if (ie
->indirect_info
->agg_contents
)
2668 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2669 ie
->indirect_info
->offset
,
2670 ie
->indirect_info
->by_ref
);
2672 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2675 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2677 if (cs
&& !agg_contents
)
2680 gcc_checking_assert (cs
->callee
2682 || jfunc
->type
!= IPA_JF_CONST
2683 || !cgraph_node_for_jfunc (jfunc
)
2684 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2685 ok
= try_decrement_rdesc_refcount (jfunc
);
2686 gcc_checking_assert (ok
);
2692 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2693 call based on a formal parameter which is described by jump function JFUNC
2694 and if it can be determined, make it direct and return the direct edge.
2695 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2698 static struct cgraph_edge
*
2699 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2700 struct ipa_jump_func
*jfunc
,
2701 struct ipa_node_params
*new_root_info
)
2705 if (!flag_devirtualize
)
2708 /* First try to do lookup via known virtual table pointer value. */
2709 if (!ie
->indirect_info
->by_ref
)
2712 unsigned HOST_WIDE_INT offset
;
2713 tree t
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2714 ie
->indirect_info
->offset
,
2716 if (t
&& vtable_pointer_value_to_vtable (t
, &vtable
, &offset
))
2718 target
= gimple_get_virt_method_for_vtable (ie
->indirect_info
->otr_token
,
2722 if ((TREE_CODE (TREE_TYPE (target
)) == FUNCTION_TYPE
2723 && DECL_FUNCTION_CODE (target
) == BUILT_IN_UNREACHABLE
)
2724 || !possible_polymorphic_call_target_p
2725 (ie
, cgraph_get_node (target
)))
2729 "Type inconsident devirtualization: %s/%i->%s\n",
2730 ie
->caller
->name (), ie
->caller
->order
,
2731 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target
)));
2732 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2733 cgraph_get_create_node (target
);
2735 return ipa_make_edge_direct_to_target (ie
, target
);
2740 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2745 if (TREE_CODE (binfo
) != TREE_BINFO
)
2747 ipa_polymorphic_call_context context
;
2748 vec
<cgraph_node
*>targets
;
2751 if (!get_polymorphic_call_info_from_invariant
2752 (&context
, binfo
, ie
->indirect_info
->otr_type
,
2753 ie
->indirect_info
->offset
))
2755 targets
= possible_polymorphic_call_targets
2756 (ie
->indirect_info
->otr_type
,
2757 ie
->indirect_info
->otr_token
,
2759 if (!final
|| targets
.length () > 1)
2761 if (targets
.length () == 1)
2762 target
= targets
[0]->decl
;
2765 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2766 cgraph_get_create_node (target
);
2771 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2772 ie
->indirect_info
->otr_type
);
2774 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2782 #ifdef ENABLE_CHECKING
2783 gcc_assert (possible_polymorphic_call_target_p
2784 (ie
, cgraph_get_node (target
)));
2786 return ipa_make_edge_direct_to_target (ie
, target
);
2792 /* Update the param called notes associated with NODE when CS is being inlined,
2793 assuming NODE is (potentially indirectly) inlined into CS->callee.
2794 Moreover, if the callee is discovered to be constant, create a new cgraph
2795 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2796 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2799 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2800 struct cgraph_node
*node
,
2801 vec
<cgraph_edge_p
> *new_edges
)
2803 struct ipa_edge_args
*top
;
2804 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2805 struct ipa_node_params
*new_root_info
;
2808 ipa_check_create_edge_args ();
2809 top
= IPA_EDGE_REF (cs
);
2810 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2811 ? cs
->caller
->global
.inlined_to
2814 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2816 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2817 struct ipa_jump_func
*jfunc
;
2820 next_ie
= ie
->next_callee
;
2822 if (ici
->param_index
== -1)
2825 /* We must check range due to calls with variable number of arguments: */
2826 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2828 ici
->param_index
= -1;
2832 param_index
= ici
->param_index
;
2833 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2835 if (!flag_indirect_inlining
)
2836 new_direct_edge
= NULL
;
2837 else if (ici
->polymorphic
)
2838 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2841 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2843 /* If speculation was removed, then we need to do nothing. */
2844 if (new_direct_edge
&& new_direct_edge
!= ie
)
2846 new_direct_edge
->indirect_inlining_edge
= 1;
2847 top
= IPA_EDGE_REF (cs
);
2850 else if (new_direct_edge
)
2852 new_direct_edge
->indirect_inlining_edge
= 1;
2853 if (new_direct_edge
->call_stmt
)
2854 new_direct_edge
->call_stmt_cannot_inline_p
2855 = !gimple_check_call_matching_types (
2856 new_direct_edge
->call_stmt
,
2857 new_direct_edge
->callee
->decl
, false);
2860 new_edges
->safe_push (new_direct_edge
);
2863 top
= IPA_EDGE_REF (cs
);
2865 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2866 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2868 if (ici
->agg_contents
2869 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2870 ici
->param_index
= -1;
2872 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2874 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2876 if (ici
->agg_contents
2877 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2878 ici
->param_index
= -1;
2881 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2882 if (ipa_get_jf_ancestor_offset (jfunc
))
2883 ici
->outer_type
= NULL
;
2884 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2888 /* Either we can find a destination for this edge now or never. */
2889 ici
->param_index
= -1;
2895 /* Recursively traverse subtree of NODE (including node) made of inlined
2896 cgraph_edges when CS has been inlined and invoke
2897 update_indirect_edges_after_inlining on all nodes and
2898 update_jump_functions_after_inlining on all non-inlined edges that lead out
2899 of this subtree. Newly discovered indirect edges will be added to
2900 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2904 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2905 struct cgraph_node
*node
,
2906 vec
<cgraph_edge_p
> *new_edges
)
2908 struct cgraph_edge
*e
;
2911 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2913 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2914 if (!e
->inline_failed
)
2915 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2917 update_jump_functions_after_inlining (cs
, e
);
2918 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2919 update_jump_functions_after_inlining (cs
, e
);
2924 /* Combine two controlled uses counts as done during inlining. */
2927 combine_controlled_uses_counters (int c
, int d
)
2929 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2930 return IPA_UNDESCRIBED_USE
;
2935 /* Propagate number of controlled users from CS->caleee to the new root of the
2936 tree of inlined nodes. */
2939 propagate_controlled_uses (struct cgraph_edge
*cs
)
2941 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2942 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2943 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2944 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2945 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2948 count
= MIN (ipa_get_cs_argument_count (args
),
2949 ipa_get_param_count (old_root_info
));
2950 for (i
= 0; i
< count
; i
++)
2952 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2953 struct ipa_cst_ref_desc
*rdesc
;
2955 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2958 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2959 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2960 d
= ipa_get_controlled_uses (old_root_info
, i
);
2962 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2963 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2964 c
= combine_controlled_uses_counters (c
, d
);
2965 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2966 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2968 struct cgraph_node
*n
;
2969 struct ipa_ref
*ref
;
2970 tree t
= new_root_info
->known_vals
[src_idx
];
2972 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2973 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2974 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2975 && (ref
= ipa_find_reference (new_root
,
2979 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2980 "reference from %s/%i to %s/%i.\n",
2981 xstrdup (new_root
->name ()),
2983 xstrdup (n
->name ()), n
->order
);
2984 ipa_remove_reference (ref
);
2988 else if (jf
->type
== IPA_JF_CONST
2989 && (rdesc
= jfunc_rdesc_usable (jf
)))
2991 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2992 int c
= rdesc
->refcount
;
2993 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2994 if (rdesc
->refcount
== 0)
2996 tree cst
= ipa_get_jf_constant (jf
);
2997 struct cgraph_node
*n
;
2998 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2999 && TREE_CODE (TREE_OPERAND (cst
, 0))
3001 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
3004 struct cgraph_node
*clone
;
3006 ok
= remove_described_reference (n
, rdesc
);
3007 gcc_checking_assert (ok
);
3010 while (clone
->global
.inlined_to
3011 && clone
!= rdesc
->cs
->caller
3012 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
3014 struct ipa_ref
*ref
;
3015 ref
= ipa_find_reference (clone
,
3020 fprintf (dump_file
, "ipa-prop: Removing "
3021 "cloning-created reference "
3022 "from %s/%i to %s/%i.\n",
3023 xstrdup (clone
->name ()),
3025 xstrdup (n
->name ()),
3027 ipa_remove_reference (ref
);
3029 clone
= clone
->callers
->caller
;
3036 for (i
= ipa_get_param_count (old_root_info
);
3037 i
< ipa_get_cs_argument_count (args
);
3040 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
3042 if (jf
->type
== IPA_JF_CONST
)
3044 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
3046 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
3048 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
3049 ipa_set_controlled_uses (new_root_info
,
3050 jf
->value
.pass_through
.formal_id
,
3051 IPA_UNDESCRIBED_USE
);
3055 /* Update jump functions and call note functions on inlining the call site CS.
3056 CS is expected to lead to a node already cloned by
3057 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3058 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3062 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
3063 vec
<cgraph_edge_p
> *new_edges
)
3066 /* Do nothing if the preparation phase has not been carried out yet
3067 (i.e. during early inlining). */
3068 if (!ipa_node_params_vector
.exists ())
3070 gcc_assert (ipa_edge_args_vector
);
3072 propagate_controlled_uses (cs
);
3073 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
3078 /* Frees all dynamically allocated structures that the argument info points
3082 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
3084 vec_free (args
->jump_functions
);
3085 memset (args
, 0, sizeof (*args
));
3088 /* Free all ipa_edge structures. */
3091 ipa_free_all_edge_args (void)
3094 struct ipa_edge_args
*args
;
3096 if (!ipa_edge_args_vector
)
3099 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3100 ipa_free_edge_args_substructures (args
);
3102 vec_free (ipa_edge_args_vector
);
3105 /* Frees all dynamically allocated structures that the param info points
3109 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
3111 info
->descriptors
.release ();
3112 free (info
->lattices
);
3113 /* Lattice values and their sources are deallocated with their alocation
3115 info
->known_vals
.release ();
3116 memset (info
, 0, sizeof (*info
));
3119 /* Free all ipa_node_params structures. */
3122 ipa_free_all_node_params (void)
3125 struct ipa_node_params
*info
;
3127 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
3128 ipa_free_node_params_substructures (info
);
3130 ipa_node_params_vector
.release ();
3133 /* Set the aggregate replacements of NODE to be AGGVALS. */
3136 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3137 struct ipa_agg_replacement_value
*aggvals
)
3139 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
3140 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
3142 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
3145 /* Hook that is called by cgraph.c when an edge is removed. */
3148 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3150 struct ipa_edge_args
*args
;
3152 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3153 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3156 args
= IPA_EDGE_REF (cs
);
3157 if (args
->jump_functions
)
3159 struct ipa_jump_func
*jf
;
3161 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3163 struct ipa_cst_ref_desc
*rdesc
;
3164 try_decrement_rdesc_refcount (jf
);
3165 if (jf
->type
== IPA_JF_CONST
3166 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3172 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3175 /* Hook that is called by cgraph.c when a node is removed. */
3178 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3180 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3181 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
3182 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
3183 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
3184 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
3187 /* Hook that is called by cgraph.c when an edge is duplicated. */
3190 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3191 __attribute__((unused
)) void *data
)
3193 struct ipa_edge_args
*old_args
, *new_args
;
3196 ipa_check_create_edge_args ();
3198 old_args
= IPA_EDGE_REF (src
);
3199 new_args
= IPA_EDGE_REF (dst
);
3201 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3203 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3205 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3206 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3208 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3210 if (src_jf
->type
== IPA_JF_CONST
)
3212 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3215 dst_jf
->value
.constant
.rdesc
= NULL
;
3216 else if (src
->caller
== dst
->caller
)
3218 struct ipa_ref
*ref
;
3219 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3220 gcc_checking_assert (n
);
3221 ref
= ipa_find_reference (src
->caller
, n
,
3222 src
->call_stmt
, src
->lto_stmt_uid
);
3223 gcc_checking_assert (ref
);
3224 ipa_clone_ref (ref
, dst
->caller
, ref
->stmt
);
3226 gcc_checking_assert (ipa_refdesc_pool
);
3227 struct ipa_cst_ref_desc
*dst_rdesc
3228 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3229 dst_rdesc
->cs
= dst
;
3230 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3231 dst_rdesc
->next_duplicate
= NULL
;
3232 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3234 else if (src_rdesc
->cs
== src
)
3236 struct ipa_cst_ref_desc
*dst_rdesc
;
3237 gcc_checking_assert (ipa_refdesc_pool
);
3239 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3240 dst_rdesc
->cs
= dst
;
3241 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3242 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3243 src_rdesc
->next_duplicate
= dst_rdesc
;
3244 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3248 struct ipa_cst_ref_desc
*dst_rdesc
;
3249 /* This can happen during inlining, when a JFUNC can refer to a
3250 reference taken in a function up in the tree of inline clones.
3251 We need to find the duplicate that refers to our tree of
3254 gcc_assert (dst
->caller
->global
.inlined_to
);
3255 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3257 dst_rdesc
= dst_rdesc
->next_duplicate
)
3259 struct cgraph_node
*top
;
3260 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3261 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3262 : dst_rdesc
->cs
->caller
;
3263 if (dst
->caller
->global
.inlined_to
== top
)
3266 gcc_assert (dst_rdesc
);
3267 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
/* NOTE(review): this block was shattered by text extraction.  The embedded
   integers (3273, 3276, ...) are the original file's line numbers fused into
   the code text, and the jumps in that numbering (3278, 3281, 3285, 3289,
   3292, 3294-3299, 3301, 3304-3305, 3307 are absent) mark lines that were
   lost entirely -- braces, the return type, the loop header over OLD_AV and
   the NEW_AV chain-building statements among them.  Recover the missing
   text from the upstream GCC sources before attempting to compile.  */
3273 /* Hook that is called by cgraph.c when a node is duplicated. */
3276 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3277 ATTRIBUTE_UNUSED
void *data
)
/* Per-node IPA summaries for the original node and its new copy.  */
3279 struct ipa_node_params
*old_info
, *new_info
;
/* Head of SRC's aggregate-replacement chain and the copy built for DST.  */
3280 struct ipa_agg_replacement_value
*old_av
, *new_av
;
/* Make sure the summary vector covers both nodes before taking refs.  */
3282 ipa_check_create_node_params ();
3283 old_info
= IPA_NODE_REF (src
);
3284 new_info
= IPA_NODE_REF (dst
);
/* Deep-copy the parameter descriptors; lattices are per-run scratch data
   and are deliberately not carried over to the clone.  */
3286 new_info
->descriptors
= old_info
->descriptors
.copy ();
3287 new_info
->lattices
= NULL
;
3288 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
/* Propagate analysis bookkeeping flags to the clone.  */
3290 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
3291 new_info
->node_enqueued
= old_info
->node_enqueued
;
3293 old_av
= ipa_get_agg_replacements_for_node (src
);
/* Clone SRC's aggregate replacement values one element at a time;
   the loop header and NEW_AV chaining statements are among the lines
   missing from this extraction (original lines 3294-3307).  */
3300 struct ipa_agg_replacement_value
*v
;
/* Shallow-copy each element via GC allocation plus memcpy.  */
3302 v
= ggc_alloc_ipa_agg_replacement_value ();
3303 memcpy (v
, old_av
, sizeof (*v
));
3306 old_av
= old_av
->next
;
/* Attach the duplicated chain to the new node.  */
3308 ipa_set_node_agg_value_chain (dst
, new_av
);
3312 /* Analyze newly added function into callgraph. */
3315 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3317 if (cgraph_function_with_gimple_body_p (node
))
3318 ipa_analyze_node (node
);
3321 /* Register our cgraph hooks if they are not already there. */
3324 ipa_register_cgraph_hooks (void)
3326 if (!edge_removal_hook_holder
)
3327 edge_removal_hook_holder
=
3328 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3329 if (!node_removal_hook_holder
)
3330 node_removal_hook_holder
=
3331 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
3332 if (!edge_duplication_hook_holder
)
3333 edge_duplication_hook_holder
=
3334 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3335 if (!node_duplication_hook_holder
)
3336 node_duplication_hook_holder
=
3337 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3338 function_insertion_hook_holder
=
3339 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
3342 /* Unregister our cgraph hooks if they are not already there. */
3345 ipa_unregister_cgraph_hooks (void)
3347 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
3348 edge_removal_hook_holder
= NULL
;
3349 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
3350 node_removal_hook_holder
= NULL
;
3351 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3352 edge_duplication_hook_holder
= NULL
;
3353 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
3354 node_duplication_hook_holder
= NULL
;
3355 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
3356 function_insertion_hook_holder
= NULL
;
/* NOTE(review): extraction-shattered block.  The fused numbers are original
   source line numbers; lines 3361-3362 and 3364-3366 are missing here
   (presumably the return type, opening brace and possibly an early-out
   guard -- TODO recover from upstream before compiling).  */
3359 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3360 longer needed after ipa-cp. */
3363 ipa_free_all_structures_after_ipa_cp (void)
/* Release the per-edge and per-node IPA summaries.  */
3367 ipa_free_all_edge_args ();
3368 ipa_free_all_node_params ();
/* These three IPCP pools are freed unconditionally here, unlike in
   ipa_free_all_structures_after_iinln which NULL-checks them first.  */
3369 free_alloc_pool (ipcp_sources_pool
);
3370 free_alloc_pool (ipcp_values_pool
);
3371 free_alloc_pool (ipcp_agg_lattice_pool
);
3372 ipa_unregister_cgraph_hooks ();
/* The refdesc pool is only created on demand, hence the guard.  */
3373 if (ipa_refdesc_pool
)
3374 free_alloc_pool (ipa_refdesc_pool
);
3378 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3379 longer needed after indirect inlining. */
3382 ipa_free_all_structures_after_iinln (void)
3384 ipa_free_all_edge_args ();
3385 ipa_free_all_node_params ();
3386 ipa_unregister_cgraph_hooks ();
3387 if (ipcp_sources_pool
)
3388 free_alloc_pool (ipcp_sources_pool
);
3389 if (ipcp_values_pool
)
3390 free_alloc_pool (ipcp_values_pool
);
3391 if (ipcp_agg_lattice_pool
)
3392 free_alloc_pool (ipcp_agg_lattice_pool
);
3393 if (ipa_refdesc_pool
)
3394 free_alloc_pool (ipa_refdesc_pool
);
/* NOTE(review): extraction-shattered block.  The fused numbers are original
   source line numbers; lines 3398-3400, 3402-3403, 3405, 3407, 3413-3416,
   3423 and 3425-3428 are missing (the tail of the leading comment, the
   return type, braces, the declarations of i/count/c, the early return for
   nodes without a definition, and loop-body bracketing).  Recover from
   upstream before compiling.  */
3397 /* Print ipa_tree_map data structures of all functions in the
3401 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
/* Per-node IPA parameter summary being dumped.  */
3404 struct ipa_node_params
*info
;
/* Nodes without a gimple definition have nothing to print (the matching
   "return;" is on a missing line).  */
3406 if (!node
->definition
)
3408 info
= IPA_NODE_REF (node
);
3409 fprintf (f
, " function %s/%i parameter descriptors:\n",
3410 node
->name (), node
->order
);
3411 count
= ipa_get_param_count (info
);
/* Dump each formal parameter followed by its usage flags.  */
3412 for (i
= 0; i
< count
; i
++)
3417 ipa_dump_param (f
, info
, i
);
3418 if (ipa_is_param_used (info
, i
))
3419 fprintf (f
, " used");
3420 c
= ipa_get_controlled_uses (info
, i
);
3421 if (c
== IPA_UNDESCRIBED_USE
)
3422 fprintf (f
, " undescribed_use");
/* Otherwise the number of controlled uses is known; the "else" keyword
   sits on a missing line.  */
3424 fprintf (f
, " controlled_uses=%i", c
);
3429 /* Print ipa_tree_map data structures of all functions in the
3433 ipa_print_all_params (FILE * f
)
3435 struct cgraph_node
*node
;
3437 fprintf (f
, "\nFunction parameters:\n");
3438 FOR_EACH_FUNCTION (node
)
3439 ipa_print_node_params (f
, node
);
3442 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3445 ipa_get_vector_of_formal_parms (tree fndecl
)
3451 gcc_assert (!flag_wpa
);
3452 count
= count_formal_params (fndecl
);
3453 args
.create (count
);
3454 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3455 args
.quick_push (parm
);
3460 /* Return a heap allocated vector containing types of formal parameters of
3461 function type FNTYPE. */
3464 ipa_get_vector_of_formal_parm_types (tree fntype
)
3470 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3473 types
.create (count
);
3474 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3475 types
.quick_push (TREE_VALUE (t
));
3480 /* Modify the function declaration FNDECL and its type according to the plan in
3481 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3482 to reflect the actual parameters being modified which are determined by the
3483 base_index field. */
3486 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3488 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3489 tree orig_type
= TREE_TYPE (fndecl
);
3490 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3492 /* The following test is an ugly hack, some functions simply don't have any
3493 arguments in their type. This is probably a bug but well... */
3494 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3495 bool last_parm_void
;
3499 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3501 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3503 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3505 gcc_assert (oparms
.length () == otypes
.length ());
3509 last_parm_void
= false;
3513 int len
= adjustments
.length ();
3514 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3515 tree new_arg_types
= NULL
;
3516 for (int i
= 0; i
< len
; i
++)
3518 struct ipa_parm_adjustment
*adj
;
3521 adj
= &adjustments
[i
];
3523 if (adj
->op
== IPA_PARM_OP_NEW
)
3526 parm
= oparms
[adj
->base_index
];
3529 if (adj
->op
== IPA_PARM_OP_COPY
)
3532 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3535 link
= &DECL_CHAIN (parm
);
3537 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3543 ptype
= build_pointer_type (adj
->type
);
3547 if (is_gimple_reg_type (ptype
))
3549 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3550 if (TYPE_ALIGN (ptype
) < malign
)
3551 ptype
= build_aligned_type (ptype
, malign
);
3556 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3558 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3560 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3561 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3562 DECL_ARTIFICIAL (new_parm
) = 1;
3563 DECL_ARG_TYPE (new_parm
) = ptype
;
3564 DECL_CONTEXT (new_parm
) = fndecl
;
3565 TREE_USED (new_parm
) = 1;
3566 DECL_IGNORED_P (new_parm
) = 1;
3567 layout_decl (new_parm
, 0);
3569 if (adj
->op
== IPA_PARM_OP_NEW
)
3573 adj
->new_decl
= new_parm
;
3576 link
= &DECL_CHAIN (new_parm
);
3582 tree new_reversed
= NULL
;
3585 new_reversed
= nreverse (new_arg_types
);
3589 TREE_CHAIN (new_arg_types
) = void_list_node
;
3591 new_reversed
= void_list_node
;
3595 /* Use copy_node to preserve as much as possible from original type
3596 (debug info, attribute lists etc.)
3597 Exception is METHOD_TYPEs must have THIS argument.
3598 When we are asked to remove it, we need to build new FUNCTION_TYPE
3600 tree new_type
= NULL
;
3601 if (TREE_CODE (orig_type
) != METHOD_TYPE
3602 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3603 && adjustments
[0].base_index
== 0))
3605 new_type
= build_distinct_type_copy (orig_type
);
3606 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3611 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3613 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3614 DECL_VINDEX (fndecl
) = NULL_TREE
;
3617 /* When signature changes, we need to clear builtin info. */
3618 if (DECL_BUILT_IN (fndecl
))
3620 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3621 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3624 /* This is a new type, not a copy of an old type. Need to reassociate
3625 variants. We can handle everything except the main variant lazily. */
3626 tree t
= TYPE_MAIN_VARIANT (orig_type
);
3629 TYPE_MAIN_VARIANT (new_type
) = t
;
3630 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3631 TYPE_NEXT_VARIANT (t
) = new_type
;
3635 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3636 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3639 TREE_TYPE (fndecl
) = new_type
;
3640 DECL_VIRTUAL_P (fndecl
) = 0;
3645 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3646 If this is a directly recursive call, CS must be NULL. Otherwise it must
3647 contain the corresponding call graph edge. */
3650 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3651 ipa_parm_adjustment_vec adjustments
)
3653 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3655 vec
<tree
, va_gc
> **debug_args
= NULL
;
3657 gimple_stmt_iterator gsi
, prev_gsi
;
3661 len
= adjustments
.length ();
3663 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3664 ipa_remove_stmt_references (current_node
, stmt
);
3666 gsi
= gsi_for_stmt (stmt
);
3668 gsi_prev (&prev_gsi
);
3669 for (i
= 0; i
< len
; i
++)
3671 struct ipa_parm_adjustment
*adj
;
3673 adj
= &adjustments
[i
];
3675 if (adj
->op
== IPA_PARM_OP_COPY
)
3677 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3679 vargs
.quick_push (arg
);
3681 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3683 tree expr
, base
, off
;
3685 unsigned int deref_align
= 0;
3686 bool deref_base
= false;
3688 /* We create a new parameter out of the value of the old one, we can
3689 do the following kind of transformations:
3691 - A scalar passed by reference is converted to a scalar passed by
3692 value. (adj->by_ref is false and the type of the original
3693 actual argument is a pointer to a scalar).
3695 - A part of an aggregate is passed instead of the whole aggregate.
3696 The part can be passed either by value or by reference, this is
3697 determined by value of adj->by_ref. Moreover, the code below
3698 handles both situations when the original aggregate is passed by
3699 value (its type is not a pointer) and when it is passed by
3700 reference (it is a pointer to an aggregate).
3702 When the new argument is passed by reference (adj->by_ref is true)
3703 it must be a part of an aggregate and therefore we form it by
3704 simply taking the address of a reference inside the original
3707 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3708 base
= gimple_call_arg (stmt
, adj
->base_index
);
3709 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3710 : EXPR_LOCATION (base
);
3712 if (TREE_CODE (base
) != ADDR_EXPR
3713 && POINTER_TYPE_P (TREE_TYPE (base
)))
3714 off
= build_int_cst (adj
->alias_ptr_type
,
3715 adj
->offset
/ BITS_PER_UNIT
);
3718 HOST_WIDE_INT base_offset
;
3722 if (TREE_CODE (base
) == ADDR_EXPR
)
3724 base
= TREE_OPERAND (base
, 0);
3730 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3731 /* Aggregate arguments can have non-invariant addresses. */
3734 base
= build_fold_addr_expr (prev_base
);
3735 off
= build_int_cst (adj
->alias_ptr_type
,
3736 adj
->offset
/ BITS_PER_UNIT
);
3738 else if (TREE_CODE (base
) == MEM_REF
)
3743 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3745 off
= build_int_cst (adj
->alias_ptr_type
,
3747 + adj
->offset
/ BITS_PER_UNIT
);
3748 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3750 base
= TREE_OPERAND (base
, 0);
3754 off
= build_int_cst (adj
->alias_ptr_type
,
3756 + adj
->offset
/ BITS_PER_UNIT
);
3757 base
= build_fold_addr_expr (base
);
3763 tree type
= adj
->type
;
3765 unsigned HOST_WIDE_INT misalign
;
3769 align
= deref_align
;
3774 get_pointer_alignment_1 (base
, &align
, &misalign
);
3775 if (TYPE_ALIGN (type
) > align
)
3776 align
= TYPE_ALIGN (type
);
3778 misalign
+= (tree_to_double_int (off
)
3779 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3781 misalign
= misalign
& (align
- 1);
3783 align
= (misalign
& -misalign
);
3784 if (align
< TYPE_ALIGN (type
))
3785 type
= build_aligned_type (type
, align
);
3786 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3790 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3791 expr
= build_fold_addr_expr (expr
);
3794 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3796 || is_gimple_reg_type (adj
->type
),
3797 NULL
, true, GSI_SAME_STMT
);
3798 vargs
.quick_push (expr
);
3800 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
3803 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3806 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3807 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3809 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3811 arg
= fold_convert_loc (gimple_location (stmt
),
3812 TREE_TYPE (origin
), arg
);
3814 if (debug_args
== NULL
)
3815 debug_args
= decl_debug_args_insert (callee_decl
);
3816 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3817 if (ddecl
== origin
)
3819 ddecl
= (**debug_args
)[ix
+ 1];
3824 ddecl
= make_node (DEBUG_EXPR_DECL
);
3825 DECL_ARTIFICIAL (ddecl
) = 1;
3826 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3827 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3829 vec_safe_push (*debug_args
, origin
);
3830 vec_safe_push (*debug_args
, ddecl
);
3832 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3833 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3837 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3839 fprintf (dump_file
, "replacing stmt:");
3840 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3843 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3845 if (gimple_call_lhs (stmt
))
3846 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3848 gimple_set_block (new_stmt
, gimple_block (stmt
));
3849 if (gimple_has_location (stmt
))
3850 gimple_set_location (new_stmt
, gimple_location (stmt
));
3851 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3852 gimple_call_copy_flags (new_stmt
, stmt
);
3854 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3856 fprintf (dump_file
, "with stmt:");
3857 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3858 fprintf (dump_file
, "\n");
3860 gsi_replace (&gsi
, new_stmt
, true);
3862 cgraph_set_call_stmt (cs
, new_stmt
);
3865 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3868 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3869 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3871 update_ssa (TODO_update_ssa
);
3872 free_dominance_info (CDI_DOMINATORS
);
3875 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3876 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3877 specifies whether the function should care about type incompatibility the
3878 current and new expressions. If it is false, the function will leave
3879 incompatibility issues to the caller. Return true iff the expression
3883 ipa_modify_expr (tree
*expr
, bool convert
,
3884 ipa_parm_adjustment_vec adjustments
)
3886 struct ipa_parm_adjustment
*cand
3887 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
3893 src
= build_simple_mem_ref (cand
->new_decl
);
3895 src
= cand
->new_decl
;
3897 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3899 fprintf (dump_file
, "About to replace expr ");
3900 print_generic_expr (dump_file
, *expr
, 0);
3901 fprintf (dump_file
, " with ");
3902 print_generic_expr (dump_file
, src
, 0);
3903 fprintf (dump_file
, "\n");
3906 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
3908 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
3916 /* If T is an SSA_NAME, return NULL if it is not a default def or
3917 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3918 the base variable is always returned, regardless if it is a default
3919 def. Return T if it is not an SSA_NAME. */
3922 get_ssa_base_param (tree t
, bool ignore_default_def
)
3924 if (TREE_CODE (t
) == SSA_NAME
)
3926 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
3927 return SSA_NAME_VAR (t
);
3934 /* Given an expression, return an adjustment entry specifying the
3935 transformation to be done on EXPR. If no suitable adjustment entry
3936 was found, returns NULL.
3938 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3939 default def, otherwise bail on them.
3941 If CONVERT is non-NULL, this function will set *CONVERT if the
3942 expression provided is a component reference. ADJUSTMENTS is the
3943 adjustments vector. */
3945 ipa_parm_adjustment
*
3946 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
3947 ipa_parm_adjustment_vec adjustments
,
3948 bool ignore_default_def
)
3950 if (TREE_CODE (**expr
) == BIT_FIELD_REF
3951 || TREE_CODE (**expr
) == IMAGPART_EXPR
3952 || TREE_CODE (**expr
) == REALPART_EXPR
)
3954 *expr
= &TREE_OPERAND (**expr
, 0);
3959 HOST_WIDE_INT offset
, size
, max_size
;
3960 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
3961 if (!base
|| size
== -1 || max_size
== -1)
3964 if (TREE_CODE (base
) == MEM_REF
)
3966 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
3967 base
= TREE_OPERAND (base
, 0);
3970 base
= get_ssa_base_param (base
, ignore_default_def
);
3971 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
3974 struct ipa_parm_adjustment
*cand
= NULL
;
3975 unsigned int len
= adjustments
.length ();
3976 for (unsigned i
= 0; i
< len
; i
++)
3978 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
3980 if (adj
->base
== base
3981 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
3988 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
3993 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3996 index_in_adjustments_multiple_times_p (int base_index
,
3997 ipa_parm_adjustment_vec adjustments
)
3999 int i
, len
= adjustments
.length ();
4002 for (i
= 0; i
< len
; i
++)
4004 struct ipa_parm_adjustment
*adj
;
4005 adj
= &adjustments
[i
];
4007 if (adj
->base_index
== base_index
)
4019 /* Return adjustments that should have the same effect on function parameters
4020 and call arguments as if they were first changed according to adjustments in
4021 INNER and then by adjustments in OUTER. */
4023 ipa_parm_adjustment_vec
4024 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
4025 ipa_parm_adjustment_vec outer
)
4027 int i
, outlen
= outer
.length ();
4028 int inlen
= inner
.length ();
4030 ipa_parm_adjustment_vec adjustments
, tmp
;
4033 for (i
= 0; i
< inlen
; i
++)
4035 struct ipa_parm_adjustment
*n
;
4038 if (n
->op
== IPA_PARM_OP_REMOVE
)
4042 /* FIXME: Handling of new arguments are not implemented yet. */
4043 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
4044 tmp
.quick_push (*n
);
4048 adjustments
.create (outlen
+ removals
);
4049 for (i
= 0; i
< outlen
; i
++)
4051 struct ipa_parm_adjustment r
;
4052 struct ipa_parm_adjustment
*out
= &outer
[i
];
4053 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
4055 memset (&r
, 0, sizeof (r
));
4056 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
4057 if (out
->op
== IPA_PARM_OP_REMOVE
)
4059 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
4061 r
.op
= IPA_PARM_OP_REMOVE
;
4062 adjustments
.quick_push (r
);
4068 /* FIXME: Handling of new arguments are not implemented yet. */
4069 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
4072 r
.base_index
= in
->base_index
;
4075 /* FIXME: Create nonlocal value too. */
4077 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
4078 r
.op
= IPA_PARM_OP_COPY
;
4079 else if (in
->op
== IPA_PARM_OP_COPY
)
4080 r
.offset
= out
->offset
;
4081 else if (out
->op
== IPA_PARM_OP_COPY
)
4082 r
.offset
= in
->offset
;
4084 r
.offset
= in
->offset
+ out
->offset
;
4085 adjustments
.quick_push (r
);
4088 for (i
= 0; i
< inlen
; i
++)
4090 struct ipa_parm_adjustment
*n
= &inner
[i
];
4092 if (n
->op
== IPA_PARM_OP_REMOVE
)
4093 adjustments
.quick_push (*n
);
4100 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4101 friendly way, assuming they are meant to be applied to FNDECL. */
4104 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4107 int i
, len
= adjustments
.length ();
4109 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4111 fprintf (file
, "IPA param adjustments: ");
4112 for (i
= 0; i
< len
; i
++)
4114 struct ipa_parm_adjustment
*adj
;
4115 adj
= &adjustments
[i
];
4118 fprintf (file
, " ");
4122 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4123 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4126 fprintf (file
, ", base: ");
4127 print_generic_expr (file
, adj
->base
, 0);
4131 fprintf (file
, ", new_decl: ");
4132 print_generic_expr (file
, adj
->new_decl
, 0);
4134 if (adj
->new_ssa_base
)
4136 fprintf (file
, ", new_ssa_base: ");
4137 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4140 if (adj
->op
== IPA_PARM_OP_COPY
)
4141 fprintf (file
, ", copy_param");
4142 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4143 fprintf (file
, ", remove_param");
4145 fprintf (file
, ", offset %li", (long) adj
->offset
);
4147 fprintf (file
, ", by_ref");
4148 print_node_brief (file
, ", type: ", adj
->type
, 0);
4149 fprintf (file
, "\n");
/* NOTE(review): extraction-shattered block.  The fused numbers are original
   source line numbers; lines 4155-4156, 4158-4159, 4162 and 4166+ are
   missing (return type, braces, the declaration and update of `comma`, and
   any trailing newline print).  Recover from upstream before compiling.  */
4154 /* Dump the AV linked list. */
4157 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4160 fprintf (f
, " Aggregate replacements:");
/* Walk the singly-linked replacement chain.  */
4161 for (; av
; av
= av
->next
)
/* `comma` is read below but its declaration and the statement setting it
   after the first element are on missing lines -- presumably a bool used
   to separate entries with ",".  TODO confirm against upstream.  */
4163 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4164 av
->index
, av
->offset
);
4165 print_generic_expr (f
, av
->value
, 0);
4171 /* Stream out jump function JUMP_FUNC to OB. */
4174 ipa_write_jump_function (struct output_block
*ob
,
4175 struct ipa_jump_func
*jump_func
)
4177 struct ipa_agg_jf_item
*item
;
4178 struct bitpack_d bp
;
4181 streamer_write_uhwi (ob
, jump_func
->type
);
4182 switch (jump_func
->type
)
4184 case IPA_JF_UNKNOWN
:
4186 case IPA_JF_KNOWN_TYPE
:
4187 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
4188 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
4189 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
4193 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4194 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4196 case IPA_JF_PASS_THROUGH
:
4197 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4198 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4200 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4201 bp
= bitpack_create (ob
->main_stream
);
4202 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4203 bp_pack_value (&bp
, jump_func
->value
.pass_through
.type_preserved
, 1);
4204 streamer_write_bitpack (&bp
);
4208 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4209 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4212 case IPA_JF_ANCESTOR
:
4213 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4214 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
4215 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4216 bp
= bitpack_create (ob
->main_stream
);
4217 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4218 bp_pack_value (&bp
, jump_func
->value
.ancestor
.type_preserved
, 1);
4219 streamer_write_bitpack (&bp
);
4223 count
= vec_safe_length (jump_func
->agg
.items
);
4224 streamer_write_uhwi (ob
, count
);
4227 bp
= bitpack_create (ob
->main_stream
);
4228 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4229 streamer_write_bitpack (&bp
);
4232 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4234 streamer_write_uhwi (ob
, item
->offset
);
4235 stream_write_tree (ob
, item
->value
, true);
4239 /* Read in jump function JUMP_FUNC from IB. */
4242 ipa_read_jump_function (struct lto_input_block
*ib
,
4243 struct ipa_jump_func
*jump_func
,
4244 struct cgraph_edge
*cs
,
4245 struct data_in
*data_in
)
4247 enum jump_func_type jftype
;
4248 enum tree_code operation
;
4251 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4254 case IPA_JF_UNKNOWN
:
4255 jump_func
->type
= IPA_JF_UNKNOWN
;
4257 case IPA_JF_KNOWN_TYPE
:
4259 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4260 tree base_type
= stream_read_tree (ib
, data_in
);
4261 tree component_type
= stream_read_tree (ib
, data_in
);
4263 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
4267 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4269 case IPA_JF_PASS_THROUGH
:
4270 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4271 if (operation
== NOP_EXPR
)
4273 int formal_id
= streamer_read_uhwi (ib
);
4274 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4275 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4276 bool type_preserved
= bp_unpack_value (&bp
, 1);
4277 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
,
4282 tree operand
= stream_read_tree (ib
, data_in
);
4283 int formal_id
= streamer_read_uhwi (ib
);
4284 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4288 case IPA_JF_ANCESTOR
:
4290 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4291 tree type
= stream_read_tree (ib
, data_in
);
4292 int formal_id
= streamer_read_uhwi (ib
);
4293 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4294 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4295 bool type_preserved
= bp_unpack_value (&bp
, 1);
4297 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
,
4303 count
= streamer_read_uhwi (ib
);
4304 vec_alloc (jump_func
->agg
.items
, count
);
4307 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4308 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4310 for (i
= 0; i
< count
; i
++)
4312 struct ipa_agg_jf_item item
;
4313 item
.offset
= streamer_read_uhwi (ib
);
4314 item
.value
= stream_read_tree (ib
, data_in
);
4315 jump_func
->agg
.items
->quick_push (item
);
4319 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4320 relevant to indirect inlining to OB. */
4323 ipa_write_indirect_edge_info (struct output_block
*ob
,
4324 struct cgraph_edge
*cs
)
4326 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4327 struct bitpack_d bp
;
4329 streamer_write_hwi (ob
, ii
->param_index
);
4330 streamer_write_hwi (ob
, ii
->offset
);
4331 bp
= bitpack_create (ob
->main_stream
);
4332 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4333 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4334 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4335 bp_pack_value (&bp
, ii
->by_ref
, 1);
4336 bp_pack_value (&bp
, ii
->maybe_in_construction
, 1);
4337 bp_pack_value (&bp
, ii
->maybe_derived_type
, 1);
4338 streamer_write_bitpack (&bp
);
4340 if (ii
->polymorphic
)
4342 streamer_write_hwi (ob
, ii
->otr_token
);
4343 stream_write_tree (ob
, ii
->otr_type
, true);
4344 stream_write_tree (ob
, ii
->outer_type
, true);
4348 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4349 relevant to indirect inlining from IB. */
4352 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4353 struct data_in
*data_in ATTRIBUTE_UNUSED
,
4354 struct cgraph_edge
*cs
)
4356 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4357 struct bitpack_d bp
;
4359 ii
->param_index
= (int) streamer_read_hwi (ib
);
4360 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4361 bp
= streamer_read_bitpack (ib
);
4362 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4363 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4364 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4365 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4366 ii
->maybe_in_construction
= bp_unpack_value (&bp
, 1);
4367 ii
->maybe_derived_type
= bp_unpack_value (&bp
, 1);
4368 if (ii
->polymorphic
)
4370 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4371 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4372 ii
->outer_type
= stream_read_tree (ib
, data_in
);
4376 /* Stream out NODE info to OB. */
4379 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4382 lto_symtab_encoder_t encoder
;
4383 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4385 struct cgraph_edge
*e
;
4386 struct bitpack_d bp
;
4388 encoder
= ob
->decl_state
->symtab_node_encoder
;
4389 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4390 streamer_write_uhwi (ob
, node_ref
);
4392 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4393 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4394 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4395 bp
= bitpack_create (ob
->main_stream
);
4396 gcc_assert (info
->uses_analysis_done
4397 || ipa_get_param_count (info
) == 0);
4398 gcc_assert (!info
->node_enqueued
);
4399 gcc_assert (!info
->ipcp_orig_node
);
4400 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4401 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4402 streamer_write_bitpack (&bp
);
4403 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4404 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4405 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4407 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4409 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4410 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4411 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4413 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4415 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4417 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4418 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4419 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4420 ipa_write_indirect_edge_info (ob
, e
);
4424 /* Stream in NODE info from IB. */
/* Reads back the per-node IPA summary in exactly the order it was written:
   parameter count, per-parameter move costs, a bitpack of param-used flags,
   controlled-uses counts, and one jump function per argument of each direct
   and indirect call edge.
   NOTE(review): this chunk is a lossy extraction -- braces, declarations of
   the loop counter K, and the trailing DATA_IN argument of the
   ipa_read_jump_function calls are missing from view; verify against the
   full file.  */
4427 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4428 struct data_in
*data_in
)
4430 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4432 struct cgraph_edge
*e
;
4433 struct bitpack_d bp
;
/* The first streamed value is the formal-parameter count; it sizes the
   descriptor vector.  */
4435 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4437 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4438 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
/* One "used" bit per parameter, packed in write order.  */
4440 bp
= streamer_read_bitpack (ib
);
/* A non-empty descriptor vector implies the uses analysis ran on the
   writer side, so mark it done here too.  */
4441 if (ipa_get_param_count (info
) != 0)
4442 info
->uses_analysis_done
= true;
4443 info
->node_enqueued
= false;
4444 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4445 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4446 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4447 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
/* Jump functions for every argument of every direct callee edge.  */
4448 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4450 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4451 int count
= streamer_read_uhwi (ib
);
4455 vec_safe_grow_cleared (args
->jump_functions
, count
);
4457 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4458 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
/* Indirect edges additionally carry their indirect-call info.  */
4461 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4463 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4464 int count
= streamer_read_uhwi (ib
);
4468 vec_safe_grow_cleared (args
->jump_functions
, count
);
4469 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4470 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4473 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4477 /* Write jump functions for nodes in SET. */
/* LTO writer hook for the jump-functions section: emits a count of
   analyzable functions in the current partition followed by one per-node
   record for each.  */
4480 ipa_prop_write_jump_functions (void)
4482 struct cgraph_node
*node
;
4483 struct output_block
*ob
;
4484 unsigned int count
= 0;
4485 lto_symtab_encoder_iterator lsei
;
4486 lto_symtab_encoder_t encoder
;
/* Nothing to stream when IPA analysis never ran.
   NOTE(review): the early "return;" after this test is missing from this
   lossy extraction.  */
4489 if (!ipa_node_params_vector
.exists ())
4492 ob
= create_output_block (LTO_section_jump_functions
);
4493 encoder
= ob
->decl_state
->symtab_node_encoder
;
4494 ob
->cgraph_node
= NULL
;
/* First pass: count functions with gimple bodies and IPA info so the
   reader knows how many records follow.
   NOTE(review): the COUNT increment line is missing from this extraction.  */
4495 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4496 lsei_next_function_in_partition (&lsei
))
4498 node
= lsei_cgraph_node (lsei
);
4499 if (cgraph_function_with_gimple_body_p (node
)
4500 && IPA_NODE_REF (node
) != NULL
)
4504 streamer_write_uhwi (ob
, count
);
4506 /* Process all of the functions. */
/* Second pass: the filter must match the counting pass exactly, or the
   record count written above would be wrong.  */
4507 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4508 lsei_next_function_in_partition (&lsei
))
4510 node
= lsei_cgraph_node (lsei
);
4511 if (cgraph_function_with_gimple_body_p (node
)
4512 && IPA_NODE_REF (node
) != NULL
)
4513 ipa_write_node_info (ob
, node
);
/* Terminate the stream and emit the section.  */
4515 streamer_write_char_stream (ob
->main_stream
, 0);
4516 produce_asm (ob
, NULL
);
4517 destroy_output_block (ob
);
4520 /* Read section in file FILE_DATA of length LEN with data DATA. */
/* Parses one jump-functions section: the lto_function_header at the start
   of DATA gives the offsets of the main and string sub-streams; the main
   stream holds a node count followed by (symtab index, node record)
   pairs.  */
4523 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4526 const struct lto_function_header
*header
=
4527 (const struct lto_function_header
*) data
;
/* Sub-stream offsets are laid out header | cfg | main | strings.  */
4528 const int cfg_offset
= sizeof (struct lto_function_header
);
4529 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4530 const int string_offset
= main_offset
+ header
->main_size
;
4531 struct data_in
*data_in
;
4532 struct lto_input_block ib_main
;
4536 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
/* data_in supplies the string table needed to stream trees back in.
   NOTE(review): the "data_in =" assignment line appears truncated in this
   extraction (compare read_replacements_section below).  */
4540 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4541 header
->string_size
, vNULL
);
4542 count
= streamer_read_uhwi (&ib_main
);
4544 for (i
= 0; i
< count
; i
++)
4547 struct cgraph_node
*node
;
4548 lto_symtab_encoder_t encoder
;
/* Each record starts with the symtab-encoder index identifying the
   node it belongs to.  */
4550 index
= streamer_read_uhwi (&ib_main
);
4551 encoder
= file_data
->symtab_node_encoder
;
4552 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
/* Only nodes with bodies were written, so the node must be defined.  */
4553 gcc_assert (node
->definition
);
4554 ipa_read_node_info (&ib_main
, node
, data_in
);
4556 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4558 lto_data_in_delete (data_in
);
4561 /* Read ipcp jump functions. */
/* LTO reader hook: walks every input file's decl data and, where a
   jump-functions section exists, hands it to ipa_prop_read_section.  */
4564 ipa_prop_read_jump_functions (void)
4566 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4567 struct lto_file_decl_data
*file_data
;
/* Make sure the summary vectors exist and cgraph hooks keep them in sync
   before any section is read into them.  */
4570 ipa_check_create_node_params ();
4571 ipa_check_create_edge_args ();
4572 ipa_register_cgraph_hooks ();
/* file_data_vec is NULL-terminated.  */
4574 while ((file_data
= file_data_vec
[j
++]))
/* A file may legitimately lack the section; the NOTE(review): guard "if
   (data)" around the call below is missing from this lossy extraction.  */
4577 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4580 ipa_prop_read_section (file_data
, data
, len
);
4584 /* After merging units, we can get mismatch in argument counts.
4585 Also decl merging might've rendered parameter lists obsolete.
4586 Also compute called_with_variable_arg info. */
/* Post-LTO-read fixup: only (re)ensures the node-params and edge-args
   summary vectors exist; both helpers are no-ops when already created.  */
4589 ipa_update_after_lto_read (void)
4591 ipa_check_create_node_params ();
4592 ipa_check_create_edge_args ();
/* Streams NODE's chain of IPA-CP aggregate replacement values into OB:
   the encoded node reference, the number of values, then for each value
   its byte offset, parameter index, the constant tree, and a one-bit
   by-ref flag.  The reader is read_agg_replacement_chain below.  */
4596 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4599 unsigned int count
= 0;
4600 lto_symtab_encoder_t encoder
;
4601 struct ipa_agg_replacement_value
*aggvals
, *av
;
4603 aggvals
= ipa_get_agg_replacements_for_node (node
);
4604 encoder
= ob
->decl_state
->symtab_node_encoder
;
4605 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4606 streamer_write_uhwi (ob
, node_ref
);
/* First walk only counts the chain entries.
   NOTE(review): the COUNT increment inside this loop is missing from this
   lossy extraction.  */
4608 for (av
= aggvals
; av
; av
= av
->next
)
4610 streamer_write_uhwi (ob
, count
);
/* Second walk emits each value in chain order.  */
4612 for (av
= aggvals
; av
; av
= av
->next
)
4614 struct bitpack_d bp
;
4616 streamer_write_uhwi (ob
, av
->offset
);
4617 streamer_write_uhwi (ob
, av
->index
);
4618 stream_write_tree (ob
, av
->value
, true);
/* by_ref is a single packed bit.  */
4620 bp
= bitpack_create (ob
->main_stream
);
4621 bp_pack_value (&bp
, av
->by_ref
, 1);
4622 streamer_write_bitpack (&bp
);
4626 /* Stream in the aggregate value replacement chain for NODE from IB. */
/* Mirror of write_agg_replacement_chain: reads the value count, then for
   each value its offset, parameter index, constant tree and by-ref bit,
   rebuilding the chain and attaching it to NODE.  */
4629 read_agg_replacement_chain (struct lto_input_block
*ib
,
4630 struct cgraph_node
*node
,
4631 struct data_in
*data_in
)
4633 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4634 unsigned int count
, i
;
4636 count
= streamer_read_uhwi (ib
);
4637 for (i
= 0; i
<count
; i
++)
4639 struct ipa_agg_replacement_value
*av
;
4640 struct bitpack_d bp
;
/* GC-allocated: the chain lives as long as the node summary.  */
4642 av
= ggc_alloc_ipa_agg_replacement_value ();
4643 av
->offset
= streamer_read_uhwi (ib
);
4644 av
->index
= streamer_read_uhwi (ib
);
4645 av
->value
= stream_read_tree (ib
, data_in
);
4646 bp
= streamer_read_bitpack (ib
);
4647 av
->by_ref
= bp_unpack_value (&bp
, 1);
/* NOTE(review): the lines linking AV onto AGGVALS (av->next = aggvals;
   aggvals = av;) are missing from this lossy extraction -- without them
   AGGVALS would stay NULL; verify against the full file.  */
4651 ipa_set_node_agg_value_chain (node
, aggvals
);
4654 /* Write all aggregate replacement for nodes in set. */
/* LTO writer hook for the ipcp-transform section.  Same two-pass shape as
   ipa_prop_write_jump_functions: count qualifying nodes, write the count,
   then write one replacement chain per node.  */
4657 ipa_prop_write_all_agg_replacement (void)
4659 struct cgraph_node
*node
;
4660 struct output_block
*ob
;
4661 unsigned int count
= 0;
4662 lto_symtab_encoder_iterator lsei
;
4663 lto_symtab_encoder_t encoder
;
/* Nothing to emit when no node has aggregate replacements.
   NOTE(review): the early "return;" after this test is missing from this
   lossy extraction.  */
4665 if (!ipa_node_agg_replacements
)
4668 ob
= create_output_block (LTO_section_ipcp_transform
);
4669 encoder
= ob
->decl_state
->symtab_node_encoder
;
4670 ob
->cgraph_node
= NULL
;
/* Counting pass; NOTE(review): the COUNT increment line is missing from
   this extraction.  */
4671 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4672 lsei_next_function_in_partition (&lsei
))
4674 node
= lsei_cgraph_node (lsei
);
4675 if (cgraph_function_with_gimple_body_p (node
)
4676 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4680 streamer_write_uhwi (ob
, count
);
/* Emitting pass: filter must match the counting pass exactly.  */
4682 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4683 lsei_next_function_in_partition (&lsei
))
4685 node
= lsei_cgraph_node (lsei
);
4686 if (cgraph_function_with_gimple_body_p (node
)
4687 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4688 write_agg_replacement_chain (ob
, node
);
4690 streamer_write_char_stream (ob
->main_stream
, 0);
4691 produce_asm (ob
, NULL
);
4692 destroy_output_block (ob
);
4695 /* Read replacements section in file FILE_DATA of length LEN with data
/* Parses one ipcp-transform section: header offsets, then a count of
   nodes followed by (symtab index, replacement chain) records.  Structure
   parallels ipa_prop_read_section above.  */
4699 read_replacements_section (struct lto_file_decl_data
*file_data
,
4703 const struct lto_function_header
*header
=
4704 (const struct lto_function_header
*) data
;
/* Sub-stream offsets: header | cfg | main | strings.  */
4705 const int cfg_offset
= sizeof (struct lto_function_header
);
4706 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4707 const int string_offset
= main_offset
+ header
->main_size
;
4708 struct data_in
*data_in
;
4709 struct lto_input_block ib_main
;
4713 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4716 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4717 header
->string_size
, vNULL
);
4718 count
= streamer_read_uhwi (&ib_main
);
4720 for (i
= 0; i
< count
; i
++)
4723 struct cgraph_node
*node
;
4724 lto_symtab_encoder_t encoder
;
4726 index
= streamer_read_uhwi (&ib_main
);
4727 encoder
= file_data
->symtab_node_encoder
;
4728 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4729 gcc_assert (node
->definition
);
4730 read_agg_replacement_chain (&ib_main
, node
, data_in
);
/* NOTE(review): the data was obtained as LTO_section_ipcp_transform (see
   the caller below) but is freed here with the LTO_section_jump_functions
   tag -- confirm this mismatch is intentional/harmless.  */
4732 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4734 lto_data_in_delete (data_in
);
4737 /* Read IPA-CP aggregate replacements. */
/* LTO reader hook: walks every input file and reads its ipcp-transform
   section, if present, via read_replacements_section.  */
4740 ipa_prop_read_all_agg_replacement (void)
4742 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4743 struct lto_file_decl_data
*file_data
;
/* file_data_vec is NULL-terminated.  */
4746 while ((file_data
= file_data_vec
[j
++]))
/* NOTE(review): the "if (data)" guard around the call below is missing
   from this lossy extraction; a file may lack the section.  */
4749 const char *data
= lto_get_section_data (file_data
,
4750 LTO_section_ipcp_transform
,
4753 read_replacements_section (file_data
, data
, len
);
4757 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
/* When NODE is a clone that dropped some formal parameters
   (clone.combined_args_to_skip), the parameter indices stored in the
   replacement chain refer to the original signature; this remaps each
   v->index to the clone's surviving parameter numbering.  */
4761 adjust_agg_replacement_values (struct cgraph_node
*node
,
4762 struct ipa_agg_replacement_value
*aggval
)
4764 struct ipa_agg_replacement_value
*v
;
4765 int i
, c
= 0, d
= 0, *adj
;
/* No skipped arguments means the indices are already correct.
   NOTE(review): the early "return;" is missing from this lossy
   extraction.  */
4767 if (!node
->clone
.combined_args_to_skip
)
/* First walk finds the highest index used, bounding the remap table.
   NOTE(review): the lines updating C from v->index are missing from this
   extraction.  */
4770 for (v
= aggval
; v
; v
= v
->next
)
4772 gcc_assert (v
->index
>= 0);
/* Build old-index -> new-index table; D counts skipped params so far.
   NOTE(review): the branch bodies assigning adj[i] are missing from this
   extraction.  */
4778 adj
= XALLOCAVEC (int, c
);
4779 for (i
= 0; i
< c
; i
++)
4780 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
/* Apply the remapping to every value in the chain.  */
4788 for (v
= aggval
; v
; v
= v
->next
)
4789 v
->index
= adj
[v
->index
];
4793 /* Function body transformation phase. */
/* IPA-CP transform: walks every statement of NODE's body and replaces
   loads from aggregate parameters with the constants recorded in the
   node's aggregate replacement chain, when offset, size and by-ref-ness
   match.  Returns a TODO mask for the pass manager.
   NOTE(review): this chunk is a lossy extraction; early returns, several
   declarations (bb, rhs, val, t, index, by_ref, param_count) and brace
   lines are missing from view.  */
4796 ipcp_transform_function (struct cgraph_node
*node
)
4798 vec
<ipa_param_descriptor
> descriptors
= vNULL
;
4799 struct param_analysis_info
*parms_ainfo
;
4800 struct ipa_agg_replacement_value
*aggval
;
4801 gimple_stmt_iterator gsi
;
4804 bool cfg_changed
= false, something_changed
= false;
4806 gcc_checking_assert (cfun
);
4807 gcc_checking_assert (current_function_decl
);
4810 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4811 node
->name (), node
->order
);
4813 aggval
= ipa_get_agg_replacements_for_node (node
);
4816 param_count
= count_formal_params (node
->decl
);
/* No formals means no aggregate-parameter loads to rewrite.  */
4817 if (param_count
== 0)
/* Remap value indices in case NODE is a clone with skipped args.  */
4819 adjust_agg_replacement_values (node
, aggval
);
4821 ipa_dump_agg_replacement_values (dump_file
, aggval
);
/* Per-parameter scratch info used by ipa_load_from_parm_agg_1; lives on
   the stack for the duration of this pass.  */
4822 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4823 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4824 descriptors
.safe_grow_cleared (param_count
);
4825 ipa_populate_param_decls (node
, descriptors
);
4827 FOR_EACH_BB_FN (bb
, cfun
)
4828 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4830 struct ipa_agg_replacement_value
*v
;
4831 gimple stmt
= gsi_stmt (gsi
);
4833 HOST_WIDE_INT offset
, size
;
/* Only plain loads of register-type values are candidates.  */
4837 if (!gimple_assign_load_p (stmt
))
4839 rhs
= gimple_assign_rhs1 (stmt
);
4840 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
/* Strip component refs to reach the base, rejecting V_C_E on the way.  */
4845 while (handled_component_p (t
))
4847 /* V_C_E can do things like convert an array of integers to one
4848 bigger integer and similar things we do not handle below. */
4849 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4854 t
= TREE_OPERAND (t
, 0);
/* Identify which parameter, offset and size this load reads, if any.  */
4859 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4860 rhs
, &index
, &offset
, &size
, &by_ref
))
/* Find a recorded replacement for exactly this (param, offset) slot.  */
4862 for (v
= aggval
; v
; v
= v
->next
)
4863 if (v
->index
== index
4864 && v
->offset
== offset
)
/* Bail out if no value matched, or it disagrees on by-ref or size.  */
4867 || v
->by_ref
!= by_ref
4868 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
4871 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
/* Coerce the constant to the load's type: a NOP cast when convertible,
   a same-size view-convert otherwise; give up (with a dump note) when
   neither applies.  */
4872 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4874 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4875 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4876 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4877 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4878 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4883 fprintf (dump_file
, " const ");
4884 print_generic_expr (dump_file
, v
->value
, 0);
4885 fprintf (dump_file
, " can't be converted to type of ");
4886 print_generic_expr (dump_file
, rhs
, 0);
4887 fprintf (dump_file
, "\n");
4895 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4897 fprintf (dump_file
, "Modifying stmt:\n ");
4898 print_gimple_stmt (dump_file
, stmt
, 0, 0);
/* Perform the actual replacement of the load by the constant.  */
4900 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4903 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4905 fprintf (dump_file
, "into:\n ");
4906 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4907 fprintf (dump_file
, "\n");
4910 something_changed
= true;
/* Replacing a load can make an EH edge dead; purging it changes the CFG.
   NOTE(review): the "cfg_changed = true;" body is missing from this
   extraction.  */
4911 if (maybe_clean_eh_stmt (stmt
)
4912 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
/* Replacements were consumed; drop them and release scratch data.  */
4916 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4917 free_parms_ainfo (parms_ainfo
, param_count
);
4918 descriptors
.release ();
/* Report to the pass manager only the cleanups actually needed.  */
4920 if (!something_changed
)
4922 else if (cfg_changed
)
4923 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4925 return TODO_update_ssa_only_virtuals
;