1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
29 #include "gimple-expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
43 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
59 #include "ipa-utils.h"
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
64 struct param_analysis_info
66 bool parm_modified
, ref_modified
, pt_modified
;
67 bitmap parm_visited_statements
, pt_visited_statements
;
70 /* Vector where the parameter infos are actually stored. */
71 vec
<ipa_node_params
> ipa_node_params_vector
;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
74 /* Vector where the parameter infos are actually stored. */
75 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
77 /* Holders of ipa cgraph hooks: */
78 static struct cgraph_edge_hook_list
*edge_removal_hook_holder
;
79 static struct cgraph_node_hook_list
*node_removal_hook_holder
;
80 static struct cgraph_2edge_hook_list
*edge_duplication_hook_holder
;
81 static struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
82 static struct cgraph_node_hook_list
*function_insertion_hook_holder
;
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge
*cs
;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc
*next_duplicate
;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
96 /* Allocation pool for reference descriptions. */
98 static alloc_pool ipa_refdesc_pool
;
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
106 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
107 struct cl_optimization
*os
;
111 os
= TREE_OPTIMIZATION (fs_opts
);
112 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
115 /* Return index of the formal whose tree is PTREE in function which corresponds
119 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
123 count
= descriptors
.length ();
124 for (i
= 0; i
< count
; i
++)
125 if (descriptors
[i
].decl
== ptree
)
131 /* Return index of the formal whose tree is PTREE in function which corresponds
135 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
137 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
140 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
144 ipa_populate_param_decls (struct cgraph_node
*node
,
145 vec
<ipa_param_descriptor
> &descriptors
)
153 gcc_assert (gimple_has_body_p (fndecl
));
154 fnargs
= DECL_ARGUMENTS (fndecl
);
156 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
158 descriptors
[param_num
].decl
= parm
;
159 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params (tree fndecl
)
171 gcc_assert (gimple_has_body_p (fndecl
));
173 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
179 /* Return the declaration of Ith formal parameter of the function corresponding
180 to INFO. Note there is no setter function as this array is built just once
181 using ipa_initialize_node_params. */
184 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
186 fprintf (file
, "param #%i", i
);
187 if (info
->descriptors
[i
].decl
)
190 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
198 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
200 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
202 if (!info
->descriptors
.exists () && param_count
)
203 info
->descriptors
.safe_grow_cleared (param_count
);
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
211 ipa_initialize_node_params (struct cgraph_node
*node
)
213 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
215 if (!info
->descriptors
.exists ())
217 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
218 ipa_populate_param_decls (node
, info
->descriptors
);
222 /* Print the jump functions associated with call graph edge CS to file F. */
225 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
229 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
230 for (i
= 0; i
< count
; i
++)
232 struct ipa_jump_func
*jump_func
;
233 enum jump_func_type type
;
235 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
236 type
= jump_func
->type
;
238 fprintf (f
, " param %d: ", i
);
239 if (type
== IPA_JF_UNKNOWN
)
240 fprintf (f
, "UNKNOWN\n");
241 else if (type
== IPA_JF_KNOWN_TYPE
)
243 fprintf (f
, "KNOWN TYPE: base ");
244 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
245 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
246 jump_func
->value
.known_type
.offset
);
247 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
250 else if (type
== IPA_JF_CONST
)
252 tree val
= jump_func
->value
.constant
.value
;
253 fprintf (f
, "CONST: ");
254 print_generic_expr (f
, val
, 0);
255 if (TREE_CODE (val
) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
259 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
264 else if (type
== IPA_JF_PASS_THROUGH
)
266 fprintf (f
, "PASS THROUGH: ");
267 fprintf (f
, "%d, op %s",
268 jump_func
->value
.pass_through
.formal_id
,
269 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
270 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
273 print_generic_expr (f
,
274 jump_func
->value
.pass_through
.operand
, 0);
276 if (jump_func
->value
.pass_through
.agg_preserved
)
277 fprintf (f
, ", agg_preserved");
278 if (jump_func
->value
.pass_through
.type_preserved
)
279 fprintf (f
, ", type_preserved");
282 else if (type
== IPA_JF_ANCESTOR
)
284 fprintf (f
, "ANCESTOR: ");
285 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
286 jump_func
->value
.ancestor
.formal_id
,
287 jump_func
->value
.ancestor
.offset
);
288 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
289 if (jump_func
->value
.ancestor
.agg_preserved
)
290 fprintf (f
, ", agg_preserved");
291 if (jump_func
->value
.ancestor
.type_preserved
)
292 fprintf (f
, ", type_preserved");
296 if (jump_func
->agg
.items
)
298 struct ipa_agg_jf_item
*item
;
301 fprintf (f
, " Aggregate passed by %s:\n",
302 jump_func
->agg
.by_ref
? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
305 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
307 if (TYPE_P (item
->value
))
308 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
309 tree_to_uhwi (TYPE_SIZE (item
->value
)));
312 fprintf (f
, "cst: ");
313 print_generic_expr (f
, item
->value
, 0);
322 /* Print the jump functions of all arguments on all call graph edges going from
326 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
328 struct cgraph_edge
*cs
;
330 fprintf (f
, " Jump functions of caller %s/%i:\n", node
->name (),
332 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
334 if (!ipa_edge_args_info_available_for_edge_p (cs
))
337 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node
->name ()), node
->order
,
339 xstrdup (cs
->callee
->name ()),
341 ipa_print_node_jump_functions_for_edge (f
, cs
);
344 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
346 struct cgraph_indirect_call_info
*ii
;
347 if (!ipa_edge_args_info_available_for_edge_p (cs
))
350 ii
= cs
->indirect_info
;
351 if (ii
->agg_contents
)
352 fprintf (f
, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
354 ii
->member_ptr
? "member ptr" : "aggregate",
355 ii
->param_index
, ii
->offset
,
356 ii
->by_ref
? "by reference" : "by_value");
358 fprintf (f
, " indirect %s callsite, calling param %i",
359 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
);
363 fprintf (f
, ", for stmt ");
364 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
368 ipa_print_node_jump_functions_for_edge (f
, cs
);
372 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
375 ipa_print_all_jump_functions (FILE *f
)
377 struct cgraph_node
*node
;
379 fprintf (f
, "\nJump functions:\n");
380 FOR_EACH_FUNCTION (node
)
382 ipa_print_node_jump_functions (f
, node
);
386 /* Set JFUNC to be a known type jump function. */
389 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
390 tree base_type
, tree component_type
)
392 gcc_assert (TREE_CODE (component_type
) == RECORD_TYPE
393 && TYPE_BINFO (component_type
));
394 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
395 jfunc
->value
.known_type
.offset
= offset
,
396 jfunc
->value
.known_type
.base_type
= base_type
;
397 jfunc
->value
.known_type
.component_type
= component_type
;
398 gcc_assert (component_type
);
401 /* Set JFUNC to be a copy of another jmp (to be used by jump function
402 combination code). The two functions will share their rdesc. */
405 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
406 struct ipa_jump_func
*src
)
409 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
410 dst
->type
= IPA_JF_CONST
;
411 dst
->value
.constant
= src
->value
.constant
;
414 /* Set JFUNC to be a constant jmp function. */
417 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
418 struct cgraph_edge
*cs
)
420 constant
= unshare_expr (constant
);
421 if (constant
&& EXPR_P (constant
))
422 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
423 jfunc
->type
= IPA_JF_CONST
;
424 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
426 if (TREE_CODE (constant
) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
429 struct ipa_cst_ref_desc
*rdesc
;
430 if (!ipa_refdesc_pool
)
431 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc
), 32);
434 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
436 rdesc
->next_duplicate
= NULL
;
438 jfunc
->value
.constant
.rdesc
= rdesc
;
441 jfunc
->value
.constant
.rdesc
= NULL
;
444 /* Set JFUNC to be a simple pass-through jump function. */
446 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
447 bool agg_preserved
, bool type_preserved
)
449 jfunc
->type
= IPA_JF_PASS_THROUGH
;
450 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
451 jfunc
->value
.pass_through
.formal_id
= formal_id
;
452 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
453 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
454 jfunc
->value
.pass_through
.type_preserved
= type_preserved
;
457 /* Set JFUNC to be an arithmetic pass through jump function. */
460 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
461 tree operand
, enum tree_code operation
)
463 jfunc
->type
= IPA_JF_PASS_THROUGH
;
464 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
465 jfunc
->value
.pass_through
.formal_id
= formal_id
;
466 jfunc
->value
.pass_through
.operation
= operation
;
467 jfunc
->value
.pass_through
.agg_preserved
= false;
468 jfunc
->value
.pass_through
.type_preserved
= false;
471 /* Set JFUNC to be an ancestor jump function. */
474 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
475 tree type
, int formal_id
, bool agg_preserved
,
478 jfunc
->type
= IPA_JF_ANCESTOR
;
479 jfunc
->value
.ancestor
.formal_id
= formal_id
;
480 jfunc
->value
.ancestor
.offset
= offset
;
481 jfunc
->value
.ancestor
.type
= type
;
482 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
483 jfunc
->value
.ancestor
.type_preserved
= type_preserved
;
486 /* Extract the actual BINFO being described by JFUNC which must be a known type
490 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
492 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
495 return get_binfo_at_offset (base_binfo
,
496 jfunc
->value
.known_type
.offset
,
497 jfunc
->value
.known_type
.component_type
);
500 /* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
503 struct type_change_info
505 /* Offset into the object where there is the virtual method pointer we are
507 HOST_WIDE_INT offset
;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type
;
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed
;
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered
;
521 /* Return true if STMT can modify a virtual method table pointer.
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
526 pointers and that constructors essentially look in the following way:
528 1) The very first thing they do is that they call constructors of ancestor
529 sub-objects that have them.
531 2) Then VMT pointers of this and all its ancestors is set to new values
532 corresponding to the type corresponding to the constructor.
534 3) Only afterwards, other stuff such as constructor of member sub-objects
535 and the code written by the user is run. Only this may include calling
536 virtual functions, directly or indirectly.
538 There is no way to call a constructor of an ancestor sub-object in any
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
545 The most important fact to derive from the above is that if, for some
546 statement in the section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop, we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
559 stmt_may_be_vtbl_ptr_store (gimple stmt
)
561 if (is_gimple_call (stmt
))
563 else if (is_gimple_assign (stmt
))
565 tree lhs
= gimple_assign_lhs (stmt
);
567 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
569 if (flag_strict_aliasing
570 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
573 if (TREE_CODE (lhs
) == COMPONENT_REF
574 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
576 /* In the future we might want to use get_base_ref_and_offset to find
577 if there is a field corresponding to the offset and if so, proceed
578 almost like if it was a component ref. */
584 /* If STMT can be proved to be an assignment to the virtual method table
585 pointer of ANALYZED_OBJ and the type associated with the new table
586 identified, return the type. Otherwise return NULL_TREE. */
589 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
591 HOST_WIDE_INT offset
, size
, max_size
;
594 if (!gimple_assign_single_p (stmt
))
597 lhs
= gimple_assign_lhs (stmt
);
598 rhs
= gimple_assign_rhs1 (stmt
);
599 if (TREE_CODE (lhs
) != COMPONENT_REF
600 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1))
601 || TREE_CODE (rhs
) != ADDR_EXPR
)
603 rhs
= get_base_address (TREE_OPERAND (rhs
, 0));
605 || TREE_CODE (rhs
) != VAR_DECL
606 || !DECL_VIRTUAL_P (rhs
))
609 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
610 if (offset
!= tci
->offset
611 || size
!= POINTER_SIZE
612 || max_size
!= POINTER_SIZE
)
614 if (TREE_CODE (base
) == MEM_REF
)
616 if (TREE_CODE (tci
->object
) != MEM_REF
617 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
618 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
619 TREE_OPERAND (base
, 1)))
622 else if (tci
->object
!= base
)
625 return DECL_CONTEXT (rhs
);
628 /* Callback of walk_aliased_vdefs and a helper function for
629 detect_type_change to check whether a particular statement may modify
630 the virtual table pointer, and if possible also determine the new type of
631 the (sub-)object. It stores its result into DATA, which points to a
632 type_change_info structure. */
635 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
637 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
638 struct type_change_info
*tci
= (struct type_change_info
*) data
;
640 if (stmt_may_be_vtbl_ptr_store (stmt
))
643 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
644 if (tci
->type_maybe_changed
645 && type
!= tci
->known_current_type
)
646 tci
->multiple_types_encountered
= true;
647 tci
->known_current_type
= type
;
648 tci
->type_maybe_changed
= true;
657 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
658 callsite CALL) by looking for assignments to its virtual table pointer. If
659 it is, return true and fill in the jump function JFUNC with relevant type
660 information or set it to unknown. ARG is the object itself (not a pointer
661 to it, unless dereferenced). BASE is the base of the memory access as
662 returned by get_ref_base_and_extent, as is the offset. */
665 detect_type_change (tree arg
, tree base
, tree comp_type
, gimple call
,
666 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
668 struct type_change_info tci
;
671 gcc_checking_assert (DECL_P (arg
)
672 || TREE_CODE (arg
) == MEM_REF
673 || handled_component_p (arg
));
674 /* Const calls cannot call virtual methods through VMT and so type changes do
676 if (!flag_devirtualize
|| !gimple_vuse (call
)
677 /* Be sure expected_type is polymorphic. */
679 || TREE_CODE (comp_type
) != RECORD_TYPE
680 || !TYPE_BINFO (comp_type
)
681 || !BINFO_VTABLE (TYPE_BINFO (comp_type
)))
684 ao_ref_init (&ao
, arg
);
687 ao
.size
= POINTER_SIZE
;
688 ao
.max_size
= ao
.size
;
691 tci
.object
= get_base_address (arg
);
692 tci
.known_current_type
= NULL_TREE
;
693 tci
.type_maybe_changed
= false;
694 tci
.multiple_types_encountered
= false;
696 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
698 if (!tci
.type_maybe_changed
)
701 if (!tci
.known_current_type
702 || tci
.multiple_types_encountered
704 jfunc
->type
= IPA_JF_UNKNOWN
;
706 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
711 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
712 SSA name (its dereference will become the base and the offset is assumed to
716 detect_type_change_ssa (tree arg
, tree comp_type
,
717 gimple call
, struct ipa_jump_func
*jfunc
)
719 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
720 if (!flag_devirtualize
721 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
724 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
725 build_int_cst (ptr_type_node
, 0));
727 return detect_type_change (arg
, arg
, comp_type
, call
, jfunc
, 0);
730 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
731 boolean variable pointed to by DATA. */
734 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
737 bool *b
= (bool *) data
;
742 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
743 a value known not to be modified in this function before reaching the
744 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
745 information about the parameter. */
748 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
749 gimple stmt
, tree parm_load
)
751 bool modified
= false;
752 bitmap
*visited_stmts
;
755 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
758 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
759 ao_ref_init (&refd
, parm_load
);
760 /* We can cache visited statements only when parm_ainfo is available and when
761 we are looking at a naked load of the whole parameter. */
762 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
763 visited_stmts
= NULL
;
765 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
766 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
768 if (parm_ainfo
&& modified
)
769 parm_ainfo
->parm_modified
= true;
773 /* If STMT is an assignment that loads a value from an parameter declaration,
774 return the index of the parameter in ipa_node_params which has not been
775 modified. Otherwise return -1. */
778 load_from_unmodified_param (vec
<ipa_param_descriptor
> descriptors
,
779 struct param_analysis_info
*parms_ainfo
,
785 if (!gimple_assign_single_p (stmt
))
788 op1
= gimple_assign_rhs1 (stmt
);
789 if (TREE_CODE (op1
) != PARM_DECL
)
792 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
794 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
801 /* Return true if memory reference REF loads data that are known to be
802 unmodified in this function before reaching statement STMT. PARM_AINFO, if
803 non-NULL, is a pointer to a structure containing temporary information about
807 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
808 gimple stmt
, tree ref
)
810 bool modified
= false;
813 gcc_checking_assert (gimple_vuse (stmt
));
814 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
817 ao_ref_init (&refd
, ref
);
818 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
820 if (parm_ainfo
&& modified
)
821 parm_ainfo
->ref_modified
= true;
825 /* Return true if the data pointed to by PARM is known to be unmodified in this
826 function before reaching call statement CALL into which it is passed.
827 PARM_AINFO is a pointer to a structure containing temporary information
831 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
832 gimple call
, tree parm
)
834 bool modified
= false;
837 /* It's unnecessary to calculate anything about memory contents for a const
838 function because it is not going to use it. But do not cache the result
839 either. Also, no such calculations for non-pointers. */
840 if (!gimple_vuse (call
)
841 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
844 if (parm_ainfo
->pt_modified
)
847 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
848 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
849 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
851 parm_ainfo
->pt_modified
= true;
855 /* Return true if we can prove that OP is a memory reference loading unmodified
856 data from an aggregate passed as a parameter and if the aggregate is passed
857 by reference, that the alias type of the load corresponds to the type of the
858 formal parameter (so that we can rely on this type for TBAA in callers).
859 INFO and PARMS_AINFO describe parameters of the current function (but the
860 latter can be NULL), STMT is the load statement. If function returns true,
861 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
862 within the aggregate and whether it is a load from a value passed by
863 reference respectively. */
866 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor
> descriptors
,
867 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
868 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
869 HOST_WIDE_INT
*size_p
, bool *by_ref_p
)
872 HOST_WIDE_INT size
, max_size
;
873 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
875 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
880 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
882 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
894 if (TREE_CODE (base
) != MEM_REF
895 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
896 || !integer_zerop (TREE_OPERAND (base
, 1)))
899 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
901 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
902 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
906 /* This branch catches situations where a pointer parameter is not a
907 gimple register, for example:
909 void hip7(S*) (struct S * p)
911 void (*<T2e4>) (struct S *) D.1867;
921 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
922 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
926 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
938 /* Just like the previous function, just without the param_analysis_info
939 pointer, for users outside of this file. */
942 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
943 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
946 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
947 offset_p
, NULL
, by_ref_p
);
950 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
951 of an assignment statement STMT, try to determine whether we are actually
952 handling any of the following cases and construct an appropriate jump
953 function into JFUNC if so:
955 1) The passed value is loaded from a formal parameter which is not a gimple
956 register (most probably because it is addressable, the value has to be
957 scalar) and we can guarantee the value has not changed. This case can
958 therefore be described by a simple pass-through jump function. For example:
967 2) The passed value can be described by a simple arithmetic pass-through
974 D.2064_4 = a.1(D) + 4;
977 This case can also occur in combination of the previous one, e.g.:
985 D.2064_4 = a.0_3 + 4;
988 3) The passed value is an address of an object within another one (which
989 also passed by reference). Such situations are described by an ancestor
990 jump function and describe situations such as:
992 B::foo() (struct B * const this)
996 D.1845_2 = &this_1(D)->D.1748;
999 INFO is the structure describing individual parameters access different
1000 stages of IPA optimizations. PARMS_AINFO contains the information that is
1001 only needed for intraprocedural analysis. */
1004 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
1005 struct param_analysis_info
*parms_ainfo
,
1006 struct ipa_jump_func
*jfunc
,
1007 gimple call
, gimple stmt
, tree name
,
1010 HOST_WIDE_INT offset
, size
, max_size
;
1011 tree op1
, tc_ssa
, base
, ssa
;
1014 op1
= gimple_assign_rhs1 (stmt
);
1016 if (TREE_CODE (op1
) == SSA_NAME
)
1018 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1019 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1021 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
1022 SSA_NAME_DEF_STMT (op1
));
1027 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
1028 tc_ssa
= gimple_assign_lhs (stmt
);
1033 tree op2
= gimple_assign_rhs2 (stmt
);
1037 if (!is_gimple_ip_invariant (op2
)
1038 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1039 && !useless_type_conversion_p (TREE_TYPE (name
),
1043 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1044 gimple_assign_rhs_code (stmt
));
1046 else if (gimple_assign_single_p (stmt
))
1048 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1050 bool type_p
= false;
1052 if (param_type
&& POINTER_TYPE_P (param_type
))
1053 type_p
= !detect_type_change_ssa (tc_ssa
, TREE_TYPE (param_type
),
1055 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1056 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
1061 if (TREE_CODE (op1
) != ADDR_EXPR
)
1063 op1
= TREE_OPERAND (op1
, 0);
1064 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1066 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1067 if (TREE_CODE (base
) != MEM_REF
1068 /* If this is a varying address, punt. */
1070 || max_size
!= size
)
1072 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1073 ssa
= TREE_OPERAND (base
, 0);
1074 if (TREE_CODE (ssa
) != SSA_NAME
1075 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1079 /* Dynamic types are changed in constructors and destructors. */
1080 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1081 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1083 bool type_p
= !detect_type_change (op1
, base
, TREE_TYPE (param_type
),
1084 call
, jfunc
, offset
);
1085 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1086 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1087 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1088 call
, ssa
), type_p
);
1092 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1095 iftmp.1_3 = &obj_2(D)->D.1762;
1097 The base of the MEM_REF must be a default definition SSA NAME of a
1098 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1099 whole MEM_REF expression is returned and the offset calculated from any
1100 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1101 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1104 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1106 HOST_WIDE_INT size
, max_size
;
1107 tree expr
, parm
, obj
;
1109 if (!gimple_assign_single_p (assign
))
1111 expr
= gimple_assign_rhs1 (assign
);
1113 if (TREE_CODE (expr
) != ADDR_EXPR
)
1115 expr
= TREE_OPERAND (expr
, 0);
1117 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1119 if (TREE_CODE (expr
) != MEM_REF
1120 /* If this is a varying address, punt. */
1125 parm
= TREE_OPERAND (expr
, 0);
1126 if (TREE_CODE (parm
) != SSA_NAME
1127 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1128 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1131 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1137 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1138 statement PHI, try to find out whether NAME is in fact a
1139 multiple-inheritance typecast from a descendant into an ancestor of a formal
1140 parameter and thus can be described by an ancestor jump function and if so,
1141 write the appropriate function into JFUNC.
1143 Essentially we want to match the following pattern:
1151 iftmp.1_3 = &obj_2(D)->D.1762;
1154 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1155 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1159 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1160 struct param_analysis_info
*parms_ainfo
,
1161 struct ipa_jump_func
*jfunc
,
1162 gimple call
, gimple phi
, tree param_type
)
1164 HOST_WIDE_INT offset
;
1165 gimple assign
, cond
;
1166 basic_block phi_bb
, assign_bb
, cond_bb
;
1167 tree tmp
, parm
, expr
, obj
;
1170 if (gimple_phi_num_args (phi
) != 2)
1173 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1174 tmp
= PHI_ARG_DEF (phi
, 0);
1175 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1176 tmp
= PHI_ARG_DEF (phi
, 1);
1179 if (TREE_CODE (tmp
) != SSA_NAME
1180 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1181 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1182 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
1185 assign
= SSA_NAME_DEF_STMT (tmp
);
1186 assign_bb
= gimple_bb (assign
);
1187 if (!single_pred_p (assign_bb
))
1189 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1192 parm
= TREE_OPERAND (expr
, 0);
1193 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1194 gcc_assert (index
>= 0);
1196 cond_bb
= single_pred (assign_bb
);
1197 cond
= last_stmt (cond_bb
);
1199 || gimple_code (cond
) != GIMPLE_COND
1200 || gimple_cond_code (cond
) != NE_EXPR
1201 || gimple_cond_lhs (cond
) != parm
1202 || !integer_zerop (gimple_cond_rhs (cond
)))
1205 phi_bb
= gimple_bb (phi
);
1206 for (i
= 0; i
< 2; i
++)
1208 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1209 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1213 bool type_p
= false;
1214 if (param_type
&& POINTER_TYPE_P (param_type
))
1215 type_p
= !detect_type_change (obj
, expr
, TREE_TYPE (param_type
),
1216 call
, jfunc
, offset
);
1217 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1218 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1219 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1220 call
, parm
), type_p
);
1223 /* Given OP which is passed as an actual argument to a called function,
1224 determine if it is possible to construct a KNOWN_TYPE jump function for it
1225 and if so, create one and store it to JFUNC.
1226 EXPECTED_TYPE represents a type the argument should be in */
1229 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1230 gimple call
, tree expected_type
)
1232 HOST_WIDE_INT offset
, size
, max_size
;
1235 if (!flag_devirtualize
1236 || TREE_CODE (op
) != ADDR_EXPR
1237 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
1238 /* Be sure expected_type is polymorphic. */
1240 || TREE_CODE (expected_type
) != RECORD_TYPE
1241 || !TYPE_BINFO (expected_type
)
1242 || !BINFO_VTABLE (TYPE_BINFO (expected_type
)))
1245 op
= TREE_OPERAND (op
, 0);
1246 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1250 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1251 || is_global_var (base
))
1254 if (detect_type_change (op
, base
, expected_type
, call
, jfunc
, offset
))
1257 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
),
1261 /* Inspect the given TYPE and return true iff it has the same structure (the
1262 same number of fields of the same types) as a C++ member pointer. If
1263 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1264 corresponding fields there. */
1267 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1271 if (TREE_CODE (type
) != RECORD_TYPE
)
1274 fld
= TYPE_FIELDS (type
);
1275 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1276 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1277 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1283 fld
= DECL_CHAIN (fld
);
1284 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1285 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1290 if (DECL_CHAIN (fld
))
1296 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1297 return the rhs of its defining statement. Otherwise return RHS as it
1301 get_ssa_def_if_simple_copy (tree rhs
)
1303 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1305 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1307 if (gimple_assign_single_p (def_stmt
))
1308 rhs
= gimple_assign_rhs1 (def_stmt
);
1315 /* Simple linked list, describing known contents of an aggregate beforere
1318 struct ipa_known_agg_contents_list
1320 /* Offset and size of the described part of the aggregate. */
1321 HOST_WIDE_INT offset
, size
;
1322 /* Known constant value or NULL if the contents is known to be unknown. */
1324 /* Pointer to the next structure in the list. */
1325 struct ipa_known_agg_contents_list
*next
;
1328 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1329 in ARG is filled in with constant values. ARG can either be an aggregate
1330 expression or a pointer to an aggregate. JFUNC is the jump function into
1331 which the constants are subsequently stored. */
1334 determine_known_aggregate_parts (gimple call
, tree arg
,
1335 struct ipa_jump_func
*jfunc
)
1337 struct ipa_known_agg_contents_list
*list
= NULL
;
1338 int item_count
= 0, const_count
= 0;
1339 HOST_WIDE_INT arg_offset
, arg_size
;
1340 gimple_stmt_iterator gsi
;
1342 bool check_ref
, by_ref
;
1345 /* The function operates in three stages. First, we prepare check_ref, r,
1346 arg_base and arg_offset based on what is actually passed as an actual
1349 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1352 if (TREE_CODE (arg
) == SSA_NAME
)
1355 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)))))
1360 type_size
= TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)));
1361 arg_size
= tree_to_uhwi (type_size
);
1362 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1364 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1366 HOST_WIDE_INT arg_max_size
;
1368 arg
= TREE_OPERAND (arg
, 0);
1369 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1371 if (arg_max_size
== -1
1372 || arg_max_size
!= arg_size
1375 if (DECL_P (arg_base
))
1379 size
= build_int_cst (integer_type_node
, arg_size
);
1380 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
1390 HOST_WIDE_INT arg_max_size
;
1392 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1396 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1398 if (arg_max_size
== -1
1399 || arg_max_size
!= arg_size
1403 ao_ref_init (&r
, arg
);
1406 /* Second stage walks back the BB, looks at individual statements and as long
1407 as it is confident of how the statements affect contents of the
1408 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1410 gsi
= gsi_for_stmt (call
);
1412 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1414 struct ipa_known_agg_contents_list
*n
, **p
;
1415 gimple stmt
= gsi_stmt (gsi
);
1416 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1417 tree lhs
, rhs
, lhs_base
;
1418 bool partial_overlap
;
1420 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1422 if (!gimple_assign_single_p (stmt
))
1425 lhs
= gimple_assign_lhs (stmt
);
1426 rhs
= gimple_assign_rhs1 (stmt
);
1427 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1428 || TREE_CODE (lhs
) == BIT_FIELD_REF
1429 || contains_bitfld_component_ref_p (lhs
))
1432 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
1434 if (lhs_max_size
== -1
1435 || lhs_max_size
!= lhs_size
1436 || (lhs_offset
< arg_offset
1437 && lhs_offset
+ lhs_size
> arg_offset
)
1438 || (lhs_offset
< arg_offset
+ arg_size
1439 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
1444 if (TREE_CODE (lhs_base
) != MEM_REF
1445 || TREE_OPERAND (lhs_base
, 0) != arg_base
1446 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1449 else if (lhs_base
!= arg_base
)
1451 if (DECL_P (lhs_base
))
1457 if (lhs_offset
+ lhs_size
< arg_offset
1458 || lhs_offset
>= (arg_offset
+ arg_size
))
1461 partial_overlap
= false;
1463 while (*p
&& (*p
)->offset
< lhs_offset
)
1465 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1467 partial_overlap
= true;
1472 if (partial_overlap
)
1474 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1476 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1477 /* We already know this value is subsequently overwritten with
1481 /* Otherwise this is a partial overlap which we cannot
1486 rhs
= get_ssa_def_if_simple_copy (rhs
);
1487 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1489 n
->offset
= lhs_offset
;
1490 if (is_gimple_ip_invariant (rhs
))
1496 n
->constant
= NULL_TREE
;
1501 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1502 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1506 /* Third stage just goes over the list and creates an appropriate vector of
1507 ipa_agg_jf_item structures out of it, of sourse only if there are
1508 any known constants to begin with. */
1512 jfunc
->agg
.by_ref
= by_ref
;
1513 vec_alloc (jfunc
->agg
.items
, const_count
);
1518 struct ipa_agg_jf_item item
;
1519 item
.offset
= list
->offset
- arg_offset
;
1520 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1521 item
.value
= unshare_expr_without_location (list
->constant
);
1522 jfunc
->agg
.items
->quick_push (item
);
1530 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
1533 tree type
= (e
->callee
1534 ? TREE_TYPE (e
->callee
->decl
)
1535 : gimple_call_fntype (e
->call_stmt
));
1536 tree t
= TYPE_ARG_TYPES (type
);
1538 for (n
= 0; n
< i
; n
++)
1545 return TREE_VALUE (t
);
1548 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1549 for (n
= 0; n
< i
; n
++)
1556 return TREE_TYPE (t
);
1560 /* Compute jump function for all arguments of callsite CS and insert the
1561 information in the jump_functions array in the ipa_edge_args corresponding
1562 to this callsite. */
1565 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1566 struct cgraph_edge
*cs
)
1568 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1569 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1570 gimple call
= cs
->call_stmt
;
1571 int n
, arg_num
= gimple_call_num_args (call
);
1573 if (arg_num
== 0 || args
->jump_functions
)
1575 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1577 if (gimple_call_internal_p (call
))
1579 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
1582 for (n
= 0; n
< arg_num
; n
++)
1584 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1585 tree arg
= gimple_call_arg (call
, n
);
1586 tree param_type
= ipa_get_callee_param_type (cs
, n
);
1588 if (is_gimple_ip_invariant (arg
))
1589 ipa_set_jf_constant (jfunc
, arg
, cs
);
1590 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1591 && TREE_CODE (arg
) == PARM_DECL
)
1593 int index
= ipa_get_param_decl_index (info
, arg
);
1595 gcc_assert (index
>=0);
1596 /* Aggregate passed by value, check for pass-through, otherwise we
1597 will attempt to fill in aggregate contents later in this
1599 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1601 ipa_set_jf_simple_pass_through (jfunc
, index
, false, false);
1605 else if (TREE_CODE (arg
) == SSA_NAME
)
1607 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1609 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1613 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1615 if (param_type
&& POINTER_TYPE_P (param_type
))
1616 type_p
= !detect_type_change_ssa (arg
, TREE_TYPE (param_type
),
1620 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1621 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
,
1627 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1628 if (is_gimple_assign (stmt
))
1629 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1630 call
, stmt
, arg
, param_type
);
1631 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1632 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1633 call
, stmt
, param_type
);
1637 compute_known_type_jump_func (arg
, jfunc
, call
,
1639 && POINTER_TYPE_P (param_type
)
1640 ? TREE_TYPE (param_type
)
1643 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1644 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1645 && (jfunc
->type
!= IPA_JF_ANCESTOR
1646 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1647 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1648 || (POINTER_TYPE_P (TREE_TYPE (arg
)))))
1649 determine_known_aggregate_parts (call
, arg
, jfunc
);
1653 /* Compute jump functions for all edges - both direct and indirect - outgoing
1654 from NODE. Also count the actual arguments in the process. */
1657 ipa_compute_jump_functions (struct cgraph_node
*node
,
1658 struct param_analysis_info
*parms_ainfo
)
1660 struct cgraph_edge
*cs
;
1662 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1664 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1666 /* We do not need to bother analyzing calls to unknown
1667 functions unless they may become known during lto/whopr. */
1668 if (!callee
->definition
&& !flag_lto
)
1670 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1673 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1674 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1677 /* If STMT looks like a statement loading a value from a member pointer formal
1678 parameter, return that parameter and store the offset of the field to
1679 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1680 might be clobbered). If USE_DELTA, then we look for a use of the delta
1681 field rather than the pfn. */
1684 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1685 HOST_WIDE_INT
*offset_p
)
1687 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1689 if (!gimple_assign_single_p (stmt
))
1692 rhs
= gimple_assign_rhs1 (stmt
);
1693 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1695 ref_field
= TREE_OPERAND (rhs
, 1);
1696 rhs
= TREE_OPERAND (rhs
, 0);
1699 ref_field
= NULL_TREE
;
1700 if (TREE_CODE (rhs
) != MEM_REF
)
1702 rec
= TREE_OPERAND (rhs
, 0);
1703 if (TREE_CODE (rec
) != ADDR_EXPR
)
1705 rec
= TREE_OPERAND (rec
, 0);
1706 if (TREE_CODE (rec
) != PARM_DECL
1707 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1709 ref_offset
= TREE_OPERAND (rhs
, 1);
1716 *offset_p
= int_bit_position (fld
);
1720 if (integer_nonzerop (ref_offset
))
1722 return ref_field
== fld
? rec
: NULL_TREE
;
1725 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1729 /* Returns true iff T is an SSA_NAME defined by a statement. */
1732 ipa_is_ssa_with_stmt_def (tree t
)
1734 if (TREE_CODE (t
) == SSA_NAME
1735 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1741 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1742 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1743 indirect call graph edge. */
1745 static struct cgraph_edge
*
1746 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1748 struct cgraph_edge
*cs
;
1750 cs
= cgraph_edge (node
, stmt
);
1751 cs
->indirect_info
->param_index
= param_index
;
1752 cs
->indirect_info
->agg_contents
= 0;
1753 cs
->indirect_info
->member_ptr
= 0;
1757 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1758 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1759 intermediate information about each formal parameter. Currently it checks
1760 whether the call calls a pointer that is a formal parameter and if so, the
1761 parameter is marked with the called flag and an indirect call graph edge
1762 describing the call is created. This is very simple for ordinary pointers
1763 represented in SSA but not-so-nice when it comes to member pointers. The
1764 ugly part of this function does nothing more than trying to match the
1765 pattern of such a call. An example of such a pattern is the gimple dump
1766 below, the call is on the last line:
1769 f$__delta_5 = f.__delta;
1770 f$__pfn_24 = f.__pfn;
1774 f$__delta_5 = MEM[(struct *)&f];
1775 f$__pfn_24 = MEM[(struct *)&f + 4B];
1777 and a few lines below:
1780 D.2496_3 = (int) f$__pfn_24;
1781 D.2497_4 = D.2496_3 & 1;
1788 D.2500_7 = (unsigned int) f$__delta_5;
1789 D.2501_8 = &S + D.2500_7;
1790 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1791 D.2503_10 = *D.2502_9;
1792 D.2504_12 = f$__pfn_24 + -1;
1793 D.2505_13 = (unsigned int) D.2504_12;
1794 D.2506_14 = D.2503_10 + D.2505_13;
1795 D.2507_15 = *D.2506_14;
1796 iftmp.11_16 = (String:: *) D.2507_15;
1799 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1800 D.2500_19 = (unsigned int) f$__delta_5;
1801 D.2508_20 = &S + D.2500_19;
1802 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1804 Such patterns are results of simple calls to a member pointer:
1806 int doprinting (int (MyString::* f)(int) const)
1808 MyString S ("somestring");
1813 Moreover, the function also looks for called pointers loaded from aggregates
1814 passed by value or reference. */
1817 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1818 struct ipa_node_params
*info
,
1819 struct param_analysis_info
*parms_ainfo
,
1820 gimple call
, tree target
)
1825 tree rec
, rec2
, cond
;
1828 basic_block bb
, virt_bb
, join
;
1829 HOST_WIDE_INT offset
;
1832 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1834 tree var
= SSA_NAME_VAR (target
);
1835 index
= ipa_get_param_decl_index (info
, var
);
1837 ipa_note_param_call (node
, index
, call
);
1841 def
= SSA_NAME_DEF_STMT (target
);
1842 if (gimple_assign_single_p (def
)
1843 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1844 gimple_assign_rhs1 (def
), &index
, &offset
,
1847 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1848 if (cs
->indirect_info
->offset
!= offset
)
1849 cs
->indirect_info
->outer_type
= NULL
;
1850 cs
->indirect_info
->offset
= offset
;
1851 cs
->indirect_info
->agg_contents
= 1;
1852 cs
->indirect_info
->by_ref
= by_ref
;
1856 /* Now we need to try to match the complex pattern of calling a member
1858 if (gimple_code (def
) != GIMPLE_PHI
1859 || gimple_phi_num_args (def
) != 2
1860 || !POINTER_TYPE_P (TREE_TYPE (target
))
1861 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1864 /* First, we need to check whether one of these is a load from a member
1865 pointer that is a parameter to this function. */
1866 n1
= PHI_ARG_DEF (def
, 0);
1867 n2
= PHI_ARG_DEF (def
, 1);
1868 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1870 d1
= SSA_NAME_DEF_STMT (n1
);
1871 d2
= SSA_NAME_DEF_STMT (n2
);
1873 join
= gimple_bb (def
);
1874 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1876 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1879 bb
= EDGE_PRED (join
, 0)->src
;
1880 virt_bb
= gimple_bb (d2
);
1882 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1884 bb
= EDGE_PRED (join
, 1)->src
;
1885 virt_bb
= gimple_bb (d1
);
1890 /* Second, we need to check that the basic blocks are laid out in the way
1891 corresponding to the pattern. */
1893 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1894 || single_pred (virt_bb
) != bb
1895 || single_succ (virt_bb
) != join
)
1898 /* Third, let's see that the branching is done depending on the least
1899 significant bit of the pfn. */
1901 branch
= last_stmt (bb
);
1902 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1905 if ((gimple_cond_code (branch
) != NE_EXPR
1906 && gimple_cond_code (branch
) != EQ_EXPR
)
1907 || !integer_zerop (gimple_cond_rhs (branch
)))
1910 cond
= gimple_cond_lhs (branch
);
1911 if (!ipa_is_ssa_with_stmt_def (cond
))
1914 def
= SSA_NAME_DEF_STMT (cond
);
1915 if (!is_gimple_assign (def
)
1916 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1917 || !integer_onep (gimple_assign_rhs2 (def
)))
1920 cond
= gimple_assign_rhs1 (def
);
1921 if (!ipa_is_ssa_with_stmt_def (cond
))
1924 def
= SSA_NAME_DEF_STMT (cond
);
1926 if (is_gimple_assign (def
)
1927 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1929 cond
= gimple_assign_rhs1 (def
);
1930 if (!ipa_is_ssa_with_stmt_def (cond
))
1932 def
= SSA_NAME_DEF_STMT (cond
);
1935 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1936 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1937 == ptrmemfunc_vbit_in_delta
),
1942 index
= ipa_get_param_decl_index (info
, rec
);
1944 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1946 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1947 if (cs
->indirect_info
->offset
!= offset
)
1948 cs
->indirect_info
->outer_type
= NULL
;
1949 cs
->indirect_info
->offset
= offset
;
1950 cs
->indirect_info
->agg_contents
= 1;
1951 cs
->indirect_info
->member_ptr
= 1;
1957 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1958 object referenced in the expression is a formal parameter of the caller
1959 (described by INFO), create a call note for the statement. */
1962 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1963 struct ipa_node_params
*info
, gimple call
,
1966 struct cgraph_edge
*cs
;
1967 struct cgraph_indirect_call_info
*ii
;
1968 struct ipa_jump_func jfunc
;
1969 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1971 HOST_WIDE_INT anc_offset
;
1973 if (!flag_devirtualize
)
1976 if (TREE_CODE (obj
) != SSA_NAME
)
1979 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1981 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1985 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1986 gcc_assert (index
>= 0);
1987 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
1993 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1996 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
1999 index
= ipa_get_param_decl_index (info
,
2000 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2001 gcc_assert (index
>= 0);
2002 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2003 call
, &jfunc
, anc_offset
))
2007 cs
= ipa_note_param_call (node
, index
, call
);
2008 ii
= cs
->indirect_info
;
2009 ii
->offset
= anc_offset
;
2010 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2011 ii
->otr_type
= obj_type_ref_class (target
);
2012 ii
->polymorphic
= 1;
2015 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2016 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2017 containing intermediate information about each formal parameter. */
2020 ipa_analyze_call_uses (struct cgraph_node
*node
,
2021 struct ipa_node_params
*info
,
2022 struct param_analysis_info
*parms_ainfo
, gimple call
)
2024 tree target
= gimple_call_fn (call
);
2028 if (TREE_CODE (target
) == SSA_NAME
)
2029 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
2030 else if (virtual_method_call_p (target
))
2031 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
2035 /* Analyze the call statement STMT with respect to formal parameters (described
2036 in INFO) of caller given by NODE. Currently it only checks whether formal
2037 parameters are called. PARMS_AINFO is a pointer to a vector containing
2038 intermediate information about each formal parameter. */
2041 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
2042 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
2044 if (is_gimple_call (stmt
))
2045 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
2048 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2049 If OP is a parameter declaration, mark it as used in the info structure
2053 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED
,
2054 tree op
, void *data
)
2056 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
2058 op
= get_base_address (op
);
2060 && TREE_CODE (op
) == PARM_DECL
)
2062 int index
= ipa_get_param_decl_index (info
, op
);
2063 gcc_assert (index
>= 0);
2064 ipa_set_param_used (info
, index
, true);
2070 /* Scan the function body of NODE and inspect the uses of formal parameters.
2071 Store the findings in various structures of the associated ipa_node_params
2072 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2073 vector containing intermediate information about each formal parameter. */
2076 ipa_analyze_params_uses (struct cgraph_node
*node
,
2077 struct param_analysis_info
*parms_ainfo
)
2079 tree decl
= node
->decl
;
2081 struct function
*func
;
2082 gimple_stmt_iterator gsi
;
2083 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
2086 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
2089 info
->uses_analysis_done
= 1;
2090 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2092 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2094 ipa_set_param_used (info
, i
, true);
2095 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2100 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2102 tree parm
= ipa_get_param (info
, i
);
2103 int controlled_uses
= 0;
2105 /* For SSA regs see if parameter is used. For non-SSA we compute
2106 the flag during modification analysis. */
2107 if (is_gimple_reg (parm
))
2109 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2111 if (ddef
&& !has_zero_uses (ddef
))
2113 imm_use_iterator imm_iter
;
2114 use_operand_p use_p
;
2116 ipa_set_param_used (info
, i
, true);
2117 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2118 if (!is_gimple_call (USE_STMT (use_p
)))
2120 controlled_uses
= IPA_UNDESCRIBED_USE
;
2127 controlled_uses
= 0;
2130 controlled_uses
= IPA_UNDESCRIBED_USE
;
2131 ipa_set_controlled_uses (info
, i
, controlled_uses
);
2134 func
= DECL_STRUCT_FUNCTION (decl
);
2135 FOR_EACH_BB_FN (bb
, func
)
2137 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2139 gimple stmt
= gsi_stmt (gsi
);
2141 if (is_gimple_debug (stmt
))
2144 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2145 walk_stmt_load_store_addr_ops (stmt
, info
,
2146 visit_ref_for_mod_analysis
,
2147 visit_ref_for_mod_analysis
,
2148 visit_ref_for_mod_analysis
);
2150 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2151 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2152 visit_ref_for_mod_analysis
,
2153 visit_ref_for_mod_analysis
,
2154 visit_ref_for_mod_analysis
);
2158 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2161 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
2165 for (i
= 0; i
< param_count
; i
++)
2167 if (parms_ainfo
[i
].parm_visited_statements
)
2168 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2169 if (parms_ainfo
[i
].pt_visited_statements
)
2170 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2174 /* Initialize the array describing properties of of formal parameters
2175 of NODE, analyze their uses and compute jump functions associated
2176 with actual arguments of calls from within NODE. */
2179 ipa_analyze_node (struct cgraph_node
*node
)
2181 struct ipa_node_params
*info
;
2182 struct param_analysis_info
*parms_ainfo
;
2185 ipa_check_create_node_params ();
2186 ipa_check_create_edge_args ();
2187 info
= IPA_NODE_REF (node
);
2188 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2189 ipa_initialize_node_params (node
);
2191 param_count
= ipa_get_param_count (info
);
2192 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2193 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2195 ipa_analyze_params_uses (node
, parms_ainfo
);
2196 ipa_compute_jump_functions (node
, parms_ainfo
);
2198 free_parms_ainfo (parms_ainfo
, param_count
);
2202 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2203 attempt a type-based devirtualization. If successful, return the
2204 target function declaration, otherwise return NULL. */
2207 ipa_intraprocedural_devirtualization (gimple call
)
2209 tree binfo
, token
, fndecl
;
2210 struct ipa_jump_func jfunc
;
2211 tree otr
= gimple_call_fn (call
);
2213 jfunc
.type
= IPA_JF_UNKNOWN
;
2214 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2215 call
, obj_type_ref_class (otr
));
2216 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2218 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
2221 token
= OBJ_TYPE_REF_TOKEN (otr
);
2222 fndecl
= gimple_get_virt_method_for_binfo (tree_to_uhwi (token
),
2224 #ifdef ENABLE_CHECKING
2226 gcc_assert (possible_polymorphic_call_target_p
2227 (otr
, cgraph_get_node (fndecl
)));
2232 /* Update the jump function DST when the call graph edge corresponding to SRC is
2233 is being inlined, knowing that DST is of type ancestor and src of known
2237 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2238 struct ipa_jump_func
*dst
)
2240 HOST_WIDE_INT combined_offset
;
2243 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2245 dst
->type
= IPA_JF_UNKNOWN
;
2249 combined_offset
= ipa_get_jf_known_type_offset (src
)
2250 + ipa_get_jf_ancestor_offset (dst
);
2251 combined_type
= ipa_get_jf_ancestor_type (dst
);
2253 ipa_set_jf_known_type (dst
, combined_offset
,
2254 ipa_get_jf_known_type_base_type (src
),
2258 /* Update the jump functions associated with call graph edge E when the call
2259 graph edge CS is being inlined, assuming that E->caller is already (possibly
2260 indirectly) inlined into CS->callee and that E has not been inlined. */
2263 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2264 struct cgraph_edge
*e
)
2266 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2267 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2268 int count
= ipa_get_cs_argument_count (args
);
2271 for (i
= 0; i
< count
; i
++)
2273 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2275 if (dst
->type
== IPA_JF_ANCESTOR
)
2277 struct ipa_jump_func
*src
;
2278 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2280 /* Variable number of arguments can cause havoc if we try to access
2281 one that does not exist in the inlined edge. So make sure we
2283 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2285 dst
->type
= IPA_JF_UNKNOWN
;
2289 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2292 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2294 struct ipa_agg_jf_item
*item
;
2297 /* Currently we do not produce clobber aggregate jump functions,
2298 replace with merging when we do. */
2299 gcc_assert (!dst
->agg
.items
);
2301 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2302 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2303 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2304 item
->offset
-= dst
->value
.ancestor
.offset
;
2307 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2308 combine_known_type_and_ancestor_jfs (src
, dst
);
2309 else if (src
->type
== IPA_JF_PASS_THROUGH
2310 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2312 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2313 dst
->value
.ancestor
.agg_preserved
&=
2314 src
->value
.pass_through
.agg_preserved
;
2315 dst
->value
.ancestor
.type_preserved
&=
2316 src
->value
.pass_through
.type_preserved
;
2318 else if (src
->type
== IPA_JF_ANCESTOR
)
2320 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2321 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2322 dst
->value
.ancestor
.agg_preserved
&=
2323 src
->value
.ancestor
.agg_preserved
;
2324 dst
->value
.ancestor
.type_preserved
&=
2325 src
->value
.ancestor
.type_preserved
;
2328 dst
->type
= IPA_JF_UNKNOWN
;
2330 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2332 struct ipa_jump_func
*src
;
2333 /* We must check range due to calls with variable number of arguments
2334 and we cannot combine jump functions with operations. */
2335 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2336 && (dst
->value
.pass_through
.formal_id
2337 < ipa_get_cs_argument_count (top
)))
2339 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2340 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2341 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2345 case IPA_JF_UNKNOWN
:
2346 dst
->type
= IPA_JF_UNKNOWN
;
2348 case IPA_JF_KNOWN_TYPE
:
2349 ipa_set_jf_known_type (dst
,
2350 ipa_get_jf_known_type_offset (src
),
2351 ipa_get_jf_known_type_base_type (src
),
2352 ipa_get_jf_known_type_base_type (src
));
2355 ipa_set_jf_cst_copy (dst
, src
);
2358 case IPA_JF_PASS_THROUGH
:
2360 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2361 enum tree_code operation
;
2362 operation
= ipa_get_jf_pass_through_operation (src
);
2364 if (operation
== NOP_EXPR
)
2368 && ipa_get_jf_pass_through_agg_preserved (src
);
2369 type_p
= ipa_get_jf_pass_through_type_preserved (src
)
2370 && ipa_get_jf_pass_through_type_preserved (dst
);
2371 ipa_set_jf_simple_pass_through (dst
, formal_id
,
2376 tree operand
= ipa_get_jf_pass_through_operand (src
);
2377 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2382 case IPA_JF_ANCESTOR
:
2386 && ipa_get_jf_ancestor_agg_preserved (src
);
2387 type_p
= ipa_get_jf_ancestor_type_preserved (src
)
2388 && ipa_get_jf_pass_through_type_preserved (dst
);
2389 ipa_set_ancestor_jf (dst
,
2390 ipa_get_jf_ancestor_offset (src
),
2391 ipa_get_jf_ancestor_type (src
),
2392 ipa_get_jf_ancestor_formal_id (src
),
2401 && (dst_agg_p
|| !src
->agg
.by_ref
))
2403 /* Currently we do not produce clobber aggregate jump
2404 functions, replace with merging when we do. */
2405 gcc_assert (!dst
->agg
.items
);
2407 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2408 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2412 dst
->type
= IPA_JF_UNKNOWN
;
2417 /* If TARGET is an addr_expr of a function declaration, make it the destination
2418 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2420 struct cgraph_edge
*
2421 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2423 struct cgraph_node
*callee
;
2424 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2425 bool unreachable
= false;
2427 if (TREE_CODE (target
) == ADDR_EXPR
)
2428 target
= TREE_OPERAND (target
, 0);
2429 if (TREE_CODE (target
) != FUNCTION_DECL
)
2431 target
= canonicalize_constructor_val (target
, NULL
);
2432 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2434 if (ie
->indirect_info
->member_ptr
)
2435 /* Member pointer call that goes through a VMT lookup. */
2439 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2440 " in %s/%i, making it unreachable.\n",
2441 ie
->caller
->name (), ie
->caller
->order
);
2442 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2443 callee
= cgraph_get_create_node (target
);
2447 callee
= cgraph_get_node (target
);
2450 callee
= cgraph_get_node (target
);
2452 /* Because may-edges are not explicitely represented and vtable may be external,
2453 we may create the first reference to the object in the unit. */
2454 if (!callee
|| callee
->global
.inlined_to
)
2457 /* We are better to ensure we can refer to it.
2458 In the case of static functions we are out of luck, since we already
2459 removed its body. In the case of public functions we may or may
2460 not introduce the reference. */
2461 if (!canonicalize_constructor_val (target
, NULL
)
2462 || !TREE_PUBLIC (target
))
2465 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2466 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2467 xstrdup (ie
->caller
->name ()),
2469 xstrdup (ie
->callee
->name ()),
2473 callee
= cgraph_get_create_node (target
);
2475 ipa_check_create_node_params ();
2477 /* We can not make edges to inline clones. It is bug that someone removed
2478 the cgraph node too early. */
2479 gcc_assert (!callee
->global
.inlined_to
);
2481 if (dump_file
&& !unreachable
)
2483 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2484 "(%s/%i -> %s/%i), for stmt ",
2485 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2486 xstrdup (ie
->caller
->name ()),
2488 xstrdup (callee
->name ()),
2491 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2493 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2495 ie
= cgraph_make_edge_direct (ie
, callee
);
2496 es
= inline_edge_summary (ie
);
2497 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2498 - eni_size_weights
.call_cost
);
2499 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2500 - eni_time_weights
.call_cost
);
2505 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2506 return NULL if there is not any. BY_REF specifies whether the value has to
2507 be passed by reference or by value. */
2510 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2511 HOST_WIDE_INT offset
, bool by_ref
)
2513 struct ipa_agg_jf_item
*item
;
2516 if (by_ref
!= agg
->by_ref
)
2519 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2520 if (item
->offset
== offset
)
2522 /* Currently we do not have clobber values, return NULL for them once
2524 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2530 /* Remove a reference to SYMBOL from the list of references of a node given by
2531 reference description RDESC. Return true if the reference has been
2532 successfully found and removed. */
2535 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2537 struct ipa_ref
*to_del
;
2538 struct cgraph_edge
*origin
;
2543 to_del
= ipa_find_reference (origin
->caller
, symbol
,
2544 origin
->call_stmt
, origin
->lto_stmt_uid
);
2548 ipa_remove_reference (to_del
);
2550 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2551 xstrdup (origin
->caller
->name ()),
2552 origin
->caller
->order
, xstrdup (symbol
->name ()));
2556 /* If JFUNC has a reference description with refcount different from
2557 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2558 NULL. JFUNC must be a constant jump function. */
2560 static struct ipa_cst_ref_desc
*
2561 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2563 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2564 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2570 /* If the value of constant jump function JFUNC is an address of a function
2571 declaration, return the associated call graph node. Otherwise return
2574 static cgraph_node
*
2575 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2577 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2578 tree cst
= ipa_get_jf_constant (jfunc
);
2579 if (TREE_CODE (cst
) != ADDR_EXPR
2580 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2583 return cgraph_get_node (TREE_OPERAND (cst
, 0));
2587 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2588 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2589 the edge specified in the rdesc. Return false if either the symbol or the
2590 reference could not be found, otherwise return true. */
2593 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2595 struct ipa_cst_ref_desc
*rdesc
;
2596 if (jfunc
->type
== IPA_JF_CONST
2597 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2598 && --rdesc
->refcount
== 0)
2600 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2604 return remove_described_reference (symbol
, rdesc
);
2609 /* Try to find a destination for indirect edge IE that corresponds to a simple
2610 call or a call of a member function pointer and where the destination is a
2611 pointer formal parameter described by jump function JFUNC. If it can be
2612 determined, return the newly direct edge, otherwise return NULL.
2613 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2615 static struct cgraph_edge
*
2616 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2617 struct ipa_jump_func
*jfunc
,
2618 struct ipa_node_params
*new_root_info
)
2620 struct cgraph_edge
*cs
;
2622 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2624 if (ie
->indirect_info
->agg_contents
)
2625 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2626 ie
->indirect_info
->offset
,
2627 ie
->indirect_info
->by_ref
);
2629 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2632 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2634 if (cs
&& !agg_contents
)
2637 gcc_checking_assert (cs
->callee
2639 || jfunc
->type
!= IPA_JF_CONST
2640 || !cgraph_node_for_jfunc (jfunc
)
2641 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2642 ok
= try_decrement_rdesc_refcount (jfunc
);
2643 gcc_checking_assert (ok
);
2649 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2650 call based on a formal parameter which is described by jump function JFUNC
2651 and if it can be determined, make it direct and return the direct edge.
2652 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2655 static struct cgraph_edge
*
2656 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2657 struct ipa_jump_func
*jfunc
,
2658 struct ipa_node_params
*new_root_info
)
2662 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2667 if (TREE_CODE (binfo
) != TREE_BINFO
)
2669 binfo
= gimple_extract_devirt_binfo_from_cst
2670 (binfo
, ie
->indirect_info
->otr_type
);
2675 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2676 ie
->indirect_info
->otr_type
);
2678 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2685 #ifdef ENABLE_CHECKING
2686 gcc_assert (possible_polymorphic_call_target_p
2687 (ie
, cgraph_get_node (target
)));
2689 return ipa_make_edge_direct_to_target (ie
, target
);
2695 /* Update the param called notes associated with NODE when CS is being inlined,
2696 assuming NODE is (potentially indirectly) inlined into CS->callee.
2697 Moreover, if the callee is discovered to be constant, create a new cgraph
2698 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2699 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2702 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2703 struct cgraph_node
*node
,
2704 vec
<cgraph_edge_p
> *new_edges
)
2706 struct ipa_edge_args
*top
;
2707 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2708 struct ipa_node_params
*new_root_info
;
2711 ipa_check_create_edge_args ();
2712 top
= IPA_EDGE_REF (cs
);
2713 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2714 ? cs
->caller
->global
.inlined_to
2717 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2719 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2720 struct ipa_jump_func
*jfunc
;
2723 next_ie
= ie
->next_callee
;
2725 if (ici
->param_index
== -1)
2728 /* We must check range due to calls with variable number of arguments: */
2729 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2731 ici
->param_index
= -1;
2735 param_index
= ici
->param_index
;
2736 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2738 if (!flag_indirect_inlining
)
2739 new_direct_edge
= NULL
;
2740 else if (ici
->polymorphic
)
2741 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2744 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2746 /* If speculation was removed, then we need to do nothing. */
2747 if (new_direct_edge
&& new_direct_edge
!= ie
)
2749 new_direct_edge
->indirect_inlining_edge
= 1;
2750 top
= IPA_EDGE_REF (cs
);
2753 else if (new_direct_edge
)
2755 new_direct_edge
->indirect_inlining_edge
= 1;
2756 if (new_direct_edge
->call_stmt
)
2757 new_direct_edge
->call_stmt_cannot_inline_p
2758 = !gimple_check_call_matching_types (
2759 new_direct_edge
->call_stmt
,
2760 new_direct_edge
->callee
->decl
, false);
2763 new_edges
->safe_push (new_direct_edge
);
2766 top
= IPA_EDGE_REF (cs
);
2768 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2769 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2771 if (ici
->agg_contents
2772 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2773 ici
->param_index
= -1;
2775 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2777 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2779 if (ici
->agg_contents
2780 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2781 ici
->param_index
= -1;
2784 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2785 if (ipa_get_jf_ancestor_offset (jfunc
))
2786 ici
->outer_type
= NULL
;
2787 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2791 /* Either we can find a destination for this edge now or never. */
2792 ici
->param_index
= -1;
2798 /* Recursively traverse subtree of NODE (including node) made of inlined
2799 cgraph_edges when CS has been inlined and invoke
2800 update_indirect_edges_after_inlining on all nodes and
2801 update_jump_functions_after_inlining on all non-inlined edges that lead out
2802 of this subtree. Newly discovered indirect edges will be added to
2803 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2807 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2808 struct cgraph_node
*node
,
2809 vec
<cgraph_edge_p
> *new_edges
)
2811 struct cgraph_edge
*e
;
2814 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2816 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2817 if (!e
->inline_failed
)
2818 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2820 update_jump_functions_after_inlining (cs
, e
);
2821 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2822 update_jump_functions_after_inlining (cs
, e
);
2827 /* Combine two controlled uses counts as done during inlining. */
2830 combine_controlled_uses_counters (int c
, int d
)
2832 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2833 return IPA_UNDESCRIBED_USE
;
2838 /* Propagate number of controlled users from CS->caleee to the new root of the
2839 tree of inlined nodes. */
2842 propagate_controlled_uses (struct cgraph_edge
*cs
)
2844 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2845 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2846 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2847 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2848 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2851 count
= MIN (ipa_get_cs_argument_count (args
),
2852 ipa_get_param_count (old_root_info
));
2853 for (i
= 0; i
< count
; i
++)
2855 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2856 struct ipa_cst_ref_desc
*rdesc
;
2858 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2861 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2862 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2863 d
= ipa_get_controlled_uses (old_root_info
, i
);
2865 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2866 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2867 c
= combine_controlled_uses_counters (c
, d
);
2868 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2869 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2871 struct cgraph_node
*n
;
2872 struct ipa_ref
*ref
;
2873 tree t
= new_root_info
->known_vals
[src_idx
];
2875 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2876 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2877 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2878 && (ref
= ipa_find_reference (new_root
,
2882 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2883 "reference from %s/%i to %s/%i.\n",
2884 xstrdup (new_root
->name ()),
2886 xstrdup (n
->name ()), n
->order
);
2887 ipa_remove_reference (ref
);
2891 else if (jf
->type
== IPA_JF_CONST
2892 && (rdesc
= jfunc_rdesc_usable (jf
)))
2894 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2895 int c
= rdesc
->refcount
;
2896 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2897 if (rdesc
->refcount
== 0)
2899 tree cst
= ipa_get_jf_constant (jf
);
2900 struct cgraph_node
*n
;
2901 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2902 && TREE_CODE (TREE_OPERAND (cst
, 0))
2904 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
2907 struct cgraph_node
*clone
;
2909 ok
= remove_described_reference (n
, rdesc
);
2910 gcc_checking_assert (ok
);
2913 while (clone
->global
.inlined_to
2914 && clone
!= rdesc
->cs
->caller
2915 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
2917 struct ipa_ref
*ref
;
2918 ref
= ipa_find_reference (clone
,
2923 fprintf (dump_file
, "ipa-prop: Removing "
2924 "cloning-created reference "
2925 "from %s/%i to %s/%i.\n",
2926 xstrdup (clone
->name ()),
2928 xstrdup (n
->name ()),
2930 ipa_remove_reference (ref
);
2932 clone
= clone
->callers
->caller
;
2939 for (i
= ipa_get_param_count (old_root_info
);
2940 i
< ipa_get_cs_argument_count (args
);
2943 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2945 if (jf
->type
== IPA_JF_CONST
)
2947 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
2949 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
2951 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
2952 ipa_set_controlled_uses (new_root_info
,
2953 jf
->value
.pass_through
.formal_id
,
2954 IPA_UNDESCRIBED_USE
);
2958 /* Update jump functions and call note functions on inlining the call site CS.
2959 CS is expected to lead to a node already cloned by
2960 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2961 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2965 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
2966 vec
<cgraph_edge_p
> *new_edges
)
2969 /* Do nothing if the preparation phase has not been carried out yet
2970 (i.e. during early inlining). */
2971 if (!ipa_node_params_vector
.exists ())
2973 gcc_assert (ipa_edge_args_vector
);
2975 propagate_controlled_uses (cs
);
2976 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
2981 /* Frees all dynamically allocated structures that the argument info points
2985 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
2987 vec_free (args
->jump_functions
);
2988 memset (args
, 0, sizeof (*args
));
2991 /* Free all ipa_edge structures. */
2994 ipa_free_all_edge_args (void)
2997 struct ipa_edge_args
*args
;
2999 if (!ipa_edge_args_vector
)
3002 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3003 ipa_free_edge_args_substructures (args
);
3005 vec_free (ipa_edge_args_vector
);
3008 /* Frees all dynamically allocated structures that the param info points
3012 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
3014 info
->descriptors
.release ();
3015 free (info
->lattices
);
3016 /* Lattice values and their sources are deallocated with their alocation
3018 info
->known_vals
.release ();
3019 memset (info
, 0, sizeof (*info
));
3022 /* Free all ipa_node_params structures. */
3025 ipa_free_all_node_params (void)
3028 struct ipa_node_params
*info
;
3030 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
3031 ipa_free_node_params_substructures (info
);
3033 ipa_node_params_vector
.release ();
3036 /* Set the aggregate replacements of NODE to be AGGVALS. */
3039 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3040 struct ipa_agg_replacement_value
*aggvals
)
3042 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
3043 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
3045 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
3048 /* Hook that is called by cgraph.c when an edge is removed. */
3051 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3053 struct ipa_edge_args
*args
;
3055 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3056 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3059 args
= IPA_EDGE_REF (cs
);
3060 if (args
->jump_functions
)
3062 struct ipa_jump_func
*jf
;
3064 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3066 struct ipa_cst_ref_desc
*rdesc
;
3067 try_decrement_rdesc_refcount (jf
);
3068 if (jf
->type
== IPA_JF_CONST
3069 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3075 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3078 /* Hook that is called by cgraph.c when a node is removed. */
3081 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3083 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3084 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
3085 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
3086 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
3087 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
3090 /* Hook that is called by cgraph.c when an edge is duplicated. */
3093 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3094 __attribute__((unused
)) void *data
)
3096 struct ipa_edge_args
*old_args
, *new_args
;
3099 ipa_check_create_edge_args ();
3101 old_args
= IPA_EDGE_REF (src
);
3102 new_args
= IPA_EDGE_REF (dst
);
3104 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3106 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3108 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3109 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3111 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3113 if (src_jf
->type
== IPA_JF_CONST
)
3115 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3118 dst_jf
->value
.constant
.rdesc
= NULL
;
3119 else if (src
->caller
== dst
->caller
)
3121 struct ipa_ref
*ref
;
3122 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3123 gcc_checking_assert (n
);
3124 ref
= ipa_find_reference (src
->caller
, n
,
3125 src
->call_stmt
, src
->lto_stmt_uid
);
3126 gcc_checking_assert (ref
);
3127 ipa_clone_ref (ref
, dst
->caller
, ref
->stmt
);
3129 gcc_checking_assert (ipa_refdesc_pool
);
3130 struct ipa_cst_ref_desc
*dst_rdesc
3131 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3132 dst_rdesc
->cs
= dst
;
3133 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3134 dst_rdesc
->next_duplicate
= NULL
;
3135 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3137 else if (src_rdesc
->cs
== src
)
3139 struct ipa_cst_ref_desc
*dst_rdesc
;
3140 gcc_checking_assert (ipa_refdesc_pool
);
3142 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3143 dst_rdesc
->cs
= dst
;
3144 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3145 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3146 src_rdesc
->next_duplicate
= dst_rdesc
;
3147 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3151 struct ipa_cst_ref_desc
*dst_rdesc
;
3152 /* This can happen during inlining, when a JFUNC can refer to a
3153 reference taken in a function up in the tree of inline clones.
3154 We need to find the duplicate that refers to our tree of
3157 gcc_assert (dst
->caller
->global
.inlined_to
);
3158 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3160 dst_rdesc
= dst_rdesc
->next_duplicate
)
3162 struct cgraph_node
*top
;
3163 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3164 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3165 : dst_rdesc
->cs
->caller
;
3166 if (dst
->caller
->global
.inlined_to
== top
)
3169 gcc_assert (dst_rdesc
);
3170 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3176 /* Hook that is called by cgraph.c when a node is duplicated. */
3179 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3180 ATTRIBUTE_UNUSED
void *data
)
3182 struct ipa_node_params
*old_info
, *new_info
;
3183 struct ipa_agg_replacement_value
*old_av
, *new_av
;
3185 ipa_check_create_node_params ();
3186 old_info
= IPA_NODE_REF (src
);
3187 new_info
= IPA_NODE_REF (dst
);
3189 new_info
->descriptors
= old_info
->descriptors
.copy ();
3190 new_info
->lattices
= NULL
;
3191 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3193 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
3194 new_info
->node_enqueued
= old_info
->node_enqueued
;
3196 old_av
= ipa_get_agg_replacements_for_node (src
);
3203 struct ipa_agg_replacement_value
*v
;
3205 v
= ggc_alloc_ipa_agg_replacement_value ();
3206 memcpy (v
, old_av
, sizeof (*v
));
3209 old_av
= old_av
->next
;
3211 ipa_set_node_agg_value_chain (dst
, new_av
);
3215 /* Analyze newly added function into callgraph. */
3218 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3220 if (cgraph_function_with_gimple_body_p (node
))
3221 ipa_analyze_node (node
);
3224 /* Register our cgraph hooks if they are not already there. */
3227 ipa_register_cgraph_hooks (void)
3229 if (!edge_removal_hook_holder
)
3230 edge_removal_hook_holder
=
3231 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3232 if (!node_removal_hook_holder
)
3233 node_removal_hook_holder
=
3234 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
3235 if (!edge_duplication_hook_holder
)
3236 edge_duplication_hook_holder
=
3237 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3238 if (!node_duplication_hook_holder
)
3239 node_duplication_hook_holder
=
3240 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3241 function_insertion_hook_holder
=
3242 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
3245 /* Unregister our cgraph hooks if they are not already there. */
3248 ipa_unregister_cgraph_hooks (void)
3250 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
3251 edge_removal_hook_holder
= NULL
;
3252 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
3253 node_removal_hook_holder
= NULL
;
3254 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3255 edge_duplication_hook_holder
= NULL
;
3256 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
3257 node_duplication_hook_holder
= NULL
;
3258 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
3259 function_insertion_hook_holder
= NULL
;
3262 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3263 longer needed after ipa-cp. */
3266 ipa_free_all_structures_after_ipa_cp (void)
3270 ipa_free_all_edge_args ();
3271 ipa_free_all_node_params ();
3272 free_alloc_pool (ipcp_sources_pool
);
3273 free_alloc_pool (ipcp_values_pool
);
3274 free_alloc_pool (ipcp_agg_lattice_pool
);
3275 ipa_unregister_cgraph_hooks ();
3276 if (ipa_refdesc_pool
)
3277 free_alloc_pool (ipa_refdesc_pool
);
3281 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3282 longer needed after indirect inlining. */
3285 ipa_free_all_structures_after_iinln (void)
3287 ipa_free_all_edge_args ();
3288 ipa_free_all_node_params ();
3289 ipa_unregister_cgraph_hooks ();
3290 if (ipcp_sources_pool
)
3291 free_alloc_pool (ipcp_sources_pool
);
3292 if (ipcp_values_pool
)
3293 free_alloc_pool (ipcp_values_pool
);
3294 if (ipcp_agg_lattice_pool
)
3295 free_alloc_pool (ipcp_agg_lattice_pool
);
3296 if (ipa_refdesc_pool
)
3297 free_alloc_pool (ipa_refdesc_pool
);
3300 /* Print ipa_tree_map data structures of all functions in the
3304 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3307 struct ipa_node_params
*info
;
3309 if (!node
->definition
)
3311 info
= IPA_NODE_REF (node
);
3312 fprintf (f
, " function %s/%i parameter descriptors:\n",
3313 node
->name (), node
->order
);
3314 count
= ipa_get_param_count (info
);
3315 for (i
= 0; i
< count
; i
++)
3319 ipa_dump_param (f
, info
, i
);
3320 if (ipa_is_param_used (info
, i
))
3321 fprintf (f
, " used");
3322 c
= ipa_get_controlled_uses (info
, i
);
3323 if (c
== IPA_UNDESCRIBED_USE
)
3324 fprintf (f
, " undescribed_use");
3326 fprintf (f
, " controlled_uses=%i", c
);
3331 /* Print ipa_tree_map data structures of all functions in the
3335 ipa_print_all_params (FILE * f
)
3337 struct cgraph_node
*node
;
3339 fprintf (f
, "\nFunction parameters:\n");
3340 FOR_EACH_FUNCTION (node
)
3341 ipa_print_node_params (f
, node
);
3344 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3347 ipa_get_vector_of_formal_parms (tree fndecl
)
3353 gcc_assert (!flag_wpa
);
3354 count
= count_formal_params (fndecl
);
3355 args
.create (count
);
3356 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3357 args
.quick_push (parm
);
3362 /* Return a heap allocated vector containing types of formal parameters of
3363 function type FNTYPE. */
3366 ipa_get_vector_of_formal_parm_types (tree fntype
)
3372 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3375 types
.create (count
);
3376 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3377 types
.quick_push (TREE_VALUE (t
));
3382 /* Modify the function declaration FNDECL and its type according to the plan in
3383 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3384 to reflect the actual parameters being modified which are determined by the
3385 base_index field. */
3388 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3390 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3391 tree orig_type
= TREE_TYPE (fndecl
);
3392 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3394 /* The following test is an ugly hack, some functions simply don't have any
3395 arguments in their type. This is probably a bug but well... */
3396 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3397 bool last_parm_void
;
3401 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3403 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3405 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3407 gcc_assert (oparms
.length () == otypes
.length ());
3411 last_parm_void
= false;
3415 int len
= adjustments
.length ();
3416 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3417 tree new_arg_types
= NULL
;
3418 for (int i
= 0; i
< len
; i
++)
3420 struct ipa_parm_adjustment
*adj
;
3423 adj
= &adjustments
[i
];
3425 if (adj
->op
== IPA_PARM_OP_NEW
)
3428 parm
= oparms
[adj
->base_index
];
3431 if (adj
->op
== IPA_PARM_OP_COPY
)
3434 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3437 link
= &DECL_CHAIN (parm
);
3439 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3445 ptype
= build_pointer_type (adj
->type
);
3449 if (is_gimple_reg_type (ptype
))
3451 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3452 if (TYPE_ALIGN (ptype
) < malign
)
3453 ptype
= build_aligned_type (ptype
, malign
);
3458 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3460 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3462 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3463 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3464 DECL_ARTIFICIAL (new_parm
) = 1;
3465 DECL_ARG_TYPE (new_parm
) = ptype
;
3466 DECL_CONTEXT (new_parm
) = fndecl
;
3467 TREE_USED (new_parm
) = 1;
3468 DECL_IGNORED_P (new_parm
) = 1;
3469 layout_decl (new_parm
, 0);
3471 if (adj
->op
== IPA_PARM_OP_NEW
)
3475 adj
->new_decl
= new_parm
;
3478 link
= &DECL_CHAIN (new_parm
);
3484 tree new_reversed
= NULL
;
3487 new_reversed
= nreverse (new_arg_types
);
3491 TREE_CHAIN (new_arg_types
) = void_list_node
;
3493 new_reversed
= void_list_node
;
3497 /* Use copy_node to preserve as much as possible from original type
3498 (debug info, attribute lists etc.)
3499 Exception is METHOD_TYPEs must have THIS argument.
3500 When we are asked to remove it, we need to build new FUNCTION_TYPE
3502 tree new_type
= NULL
;
3503 if (TREE_CODE (orig_type
) != METHOD_TYPE
3504 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3505 && adjustments
[0].base_index
== 0))
3507 new_type
= build_distinct_type_copy (orig_type
);
3508 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3513 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3515 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3516 DECL_VINDEX (fndecl
) = NULL_TREE
;
3519 /* When signature changes, we need to clear builtin info. */
3520 if (DECL_BUILT_IN (fndecl
))
3522 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3523 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3526 /* This is a new type, not a copy of an old type. Need to reassociate
3527 variants. We can handle everything except the main variant lazily. */
3528 tree t
= TYPE_MAIN_VARIANT (orig_type
);
3531 TYPE_MAIN_VARIANT (new_type
) = t
;
3532 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3533 TYPE_NEXT_VARIANT (t
) = new_type
;
3537 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3538 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3541 TREE_TYPE (fndecl
) = new_type
;
3542 DECL_VIRTUAL_P (fndecl
) = 0;
3547 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3548 If this is a directly recursive call, CS must be NULL. Otherwise it must
3549 contain the corresponding call graph edge. */
3552 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3553 ipa_parm_adjustment_vec adjustments
)
3555 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3557 vec
<tree
, va_gc
> **debug_args
= NULL
;
3559 gimple_stmt_iterator gsi
, prev_gsi
;
3563 len
= adjustments
.length ();
3565 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3566 ipa_remove_stmt_references (current_node
, stmt
);
3568 gsi
= gsi_for_stmt (stmt
);
3570 gsi_prev (&prev_gsi
);
3571 for (i
= 0; i
< len
; i
++)
3573 struct ipa_parm_adjustment
*adj
;
3575 adj
= &adjustments
[i
];
3577 if (adj
->op
== IPA_PARM_OP_COPY
)
3579 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3581 vargs
.quick_push (arg
);
3583 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3585 tree expr
, base
, off
;
3587 unsigned int deref_align
= 0;
3588 bool deref_base
= false;
3590 /* We create a new parameter out of the value of the old one, we can
3591 do the following kind of transformations:
3593 - A scalar passed by reference is converted to a scalar passed by
3594 value. (adj->by_ref is false and the type of the original
3595 actual argument is a pointer to a scalar).
3597 - A part of an aggregate is passed instead of the whole aggregate.
3598 The part can be passed either by value or by reference, this is
3599 determined by value of adj->by_ref. Moreover, the code below
3600 handles both situations when the original aggregate is passed by
3601 value (its type is not a pointer) and when it is passed by
3602 reference (it is a pointer to an aggregate).
3604 When the new argument is passed by reference (adj->by_ref is true)
3605 it must be a part of an aggregate and therefore we form it by
3606 simply taking the address of a reference inside the original
3609 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3610 base
= gimple_call_arg (stmt
, adj
->base_index
);
3611 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3612 : EXPR_LOCATION (base
);
3614 if (TREE_CODE (base
) != ADDR_EXPR
3615 && POINTER_TYPE_P (TREE_TYPE (base
)))
3616 off
= build_int_cst (adj
->alias_ptr_type
,
3617 adj
->offset
/ BITS_PER_UNIT
);
3620 HOST_WIDE_INT base_offset
;
3624 if (TREE_CODE (base
) == ADDR_EXPR
)
3626 base
= TREE_OPERAND (base
, 0);
3632 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3633 /* Aggregate arguments can have non-invariant addresses. */
3636 base
= build_fold_addr_expr (prev_base
);
3637 off
= build_int_cst (adj
->alias_ptr_type
,
3638 adj
->offset
/ BITS_PER_UNIT
);
3640 else if (TREE_CODE (base
) == MEM_REF
)
3645 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3647 off
= build_int_cst (adj
->alias_ptr_type
,
3649 + adj
->offset
/ BITS_PER_UNIT
);
3650 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3652 base
= TREE_OPERAND (base
, 0);
3656 off
= build_int_cst (adj
->alias_ptr_type
,
3658 + adj
->offset
/ BITS_PER_UNIT
);
3659 base
= build_fold_addr_expr (base
);
3665 tree type
= adj
->type
;
3667 unsigned HOST_WIDE_INT misalign
;
3671 align
= deref_align
;
3676 get_pointer_alignment_1 (base
, &align
, &misalign
);
3677 if (TYPE_ALIGN (type
) > align
)
3678 align
= TYPE_ALIGN (type
);
3680 misalign
+= (tree_to_double_int (off
)
3681 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3683 misalign
= misalign
& (align
- 1);
3685 align
= (misalign
& -misalign
);
3686 if (align
< TYPE_ALIGN (type
))
3687 type
= build_aligned_type (type
, align
);
3688 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3692 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3693 expr
= build_fold_addr_expr (expr
);
3696 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3698 || is_gimple_reg_type (adj
->type
),
3699 NULL
, true, GSI_SAME_STMT
);
3700 vargs
.quick_push (expr
);
3702 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
3705 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3708 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3709 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3711 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3713 arg
= fold_convert_loc (gimple_location (stmt
),
3714 TREE_TYPE (origin
), arg
);
3716 if (debug_args
== NULL
)
3717 debug_args
= decl_debug_args_insert (callee_decl
);
3718 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3719 if (ddecl
== origin
)
3721 ddecl
= (**debug_args
)[ix
+ 1];
3726 ddecl
= make_node (DEBUG_EXPR_DECL
);
3727 DECL_ARTIFICIAL (ddecl
) = 1;
3728 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3729 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3731 vec_safe_push (*debug_args
, origin
);
3732 vec_safe_push (*debug_args
, ddecl
);
3734 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3735 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3739 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3741 fprintf (dump_file
, "replacing stmt:");
3742 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3745 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3747 if (gimple_call_lhs (stmt
))
3748 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3750 gimple_set_block (new_stmt
, gimple_block (stmt
));
3751 if (gimple_has_location (stmt
))
3752 gimple_set_location (new_stmt
, gimple_location (stmt
));
3753 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3754 gimple_call_copy_flags (new_stmt
, stmt
);
3756 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3758 fprintf (dump_file
, "with stmt:");
3759 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3760 fprintf (dump_file
, "\n");
3762 gsi_replace (&gsi
, new_stmt
, true);
3764 cgraph_set_call_stmt (cs
, new_stmt
);
3767 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3770 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3771 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3773 update_ssa (TODO_update_ssa
);
3774 free_dominance_info (CDI_DOMINATORS
);
3777 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3778 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3779 specifies whether the function should care about type incompatibility the
3780 current and new expressions. If it is false, the function will leave
3781 incompatibility issues to the caller. Return true iff the expression
3785 ipa_modify_expr (tree
*expr
, bool convert
,
3786 ipa_parm_adjustment_vec adjustments
)
3788 struct ipa_parm_adjustment
*cand
3789 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
3795 src
= build_simple_mem_ref (cand
->new_decl
);
3797 src
= cand
->new_decl
;
3799 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3801 fprintf (dump_file
, "About to replace expr ");
3802 print_generic_expr (dump_file
, *expr
, 0);
3803 fprintf (dump_file
, " with ");
3804 print_generic_expr (dump_file
, src
, 0);
3805 fprintf (dump_file
, "\n");
3808 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
3810 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
3818 /* If T is an SSA_NAME, return NULL if it is not a default def or
3819 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3820 the base variable is always returned, regardless if it is a default
3821 def. Return T if it is not an SSA_NAME. */
3824 get_ssa_base_param (tree t
, bool ignore_default_def
)
3826 if (TREE_CODE (t
) == SSA_NAME
)
3828 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
3829 return SSA_NAME_VAR (t
);
3836 /* Given an expression, return an adjustment entry specifying the
3837 transformation to be done on EXPR. If no suitable adjustment entry
3838 was found, returns NULL.
3840 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3841 default def, otherwise bail on them.
3843 If CONVERT is non-NULL, this function will set *CONVERT if the
3844 expression provided is a component reference. ADJUSTMENTS is the
3845 adjustments vector. */
3847 ipa_parm_adjustment
*
3848 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
3849 ipa_parm_adjustment_vec adjustments
,
3850 bool ignore_default_def
)
3852 if (TREE_CODE (**expr
) == BIT_FIELD_REF
3853 || TREE_CODE (**expr
) == IMAGPART_EXPR
3854 || TREE_CODE (**expr
) == REALPART_EXPR
)
3856 *expr
= &TREE_OPERAND (**expr
, 0);
3861 HOST_WIDE_INT offset
, size
, max_size
;
3862 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
3863 if (!base
|| size
== -1 || max_size
== -1)
3866 if (TREE_CODE (base
) == MEM_REF
)
3868 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
3869 base
= TREE_OPERAND (base
, 0);
3872 base
= get_ssa_base_param (base
, ignore_default_def
);
3873 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
3876 struct ipa_parm_adjustment
*cand
= NULL
;
3877 unsigned int len
= adjustments
.length ();
3878 for (unsigned i
= 0; i
< len
; i
++)
3880 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
3882 if (adj
->base
== base
3883 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
3890 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
3895 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3898 index_in_adjustments_multiple_times_p (int base_index
,
3899 ipa_parm_adjustment_vec adjustments
)
3901 int i
, len
= adjustments
.length ();
3904 for (i
= 0; i
< len
; i
++)
3906 struct ipa_parm_adjustment
*adj
;
3907 adj
= &adjustments
[i
];
3909 if (adj
->base_index
== base_index
)
3921 /* Return adjustments that should have the same effect on function parameters
3922 and call arguments as if they were first changed according to adjustments in
3923 INNER and then by adjustments in OUTER. */
3925 ipa_parm_adjustment_vec
3926 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
3927 ipa_parm_adjustment_vec outer
)
3929 int i
, outlen
= outer
.length ();
3930 int inlen
= inner
.length ();
3932 ipa_parm_adjustment_vec adjustments
, tmp
;
3935 for (i
= 0; i
< inlen
; i
++)
3937 struct ipa_parm_adjustment
*n
;
3940 if (n
->op
== IPA_PARM_OP_REMOVE
)
3944 /* FIXME: Handling of new arguments are not implemented yet. */
3945 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
3946 tmp
.quick_push (*n
);
3950 adjustments
.create (outlen
+ removals
);
3951 for (i
= 0; i
< outlen
; i
++)
3953 struct ipa_parm_adjustment r
;
3954 struct ipa_parm_adjustment
*out
= &outer
[i
];
3955 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
3957 memset (&r
, 0, sizeof (r
));
3958 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
3959 if (out
->op
== IPA_PARM_OP_REMOVE
)
3961 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
3963 r
.op
= IPA_PARM_OP_REMOVE
;
3964 adjustments
.quick_push (r
);
3970 /* FIXME: Handling of new arguments are not implemented yet. */
3971 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
3974 r
.base_index
= in
->base_index
;
3977 /* FIXME: Create nonlocal value too. */
3979 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
3980 r
.op
= IPA_PARM_OP_COPY
;
3981 else if (in
->op
== IPA_PARM_OP_COPY
)
3982 r
.offset
= out
->offset
;
3983 else if (out
->op
== IPA_PARM_OP_COPY
)
3984 r
.offset
= in
->offset
;
3986 r
.offset
= in
->offset
+ out
->offset
;
3987 adjustments
.quick_push (r
);
3990 for (i
= 0; i
< inlen
; i
++)
3992 struct ipa_parm_adjustment
*n
= &inner
[i
];
3994 if (n
->op
== IPA_PARM_OP_REMOVE
)
3995 adjustments
.quick_push (*n
);
4002 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4003 friendly way, assuming they are meant to be applied to FNDECL. */
4006 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4009 int i
, len
= adjustments
.length ();
4011 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4013 fprintf (file
, "IPA param adjustments: ");
4014 for (i
= 0; i
< len
; i
++)
4016 struct ipa_parm_adjustment
*adj
;
4017 adj
= &adjustments
[i
];
4020 fprintf (file
, " ");
4024 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4025 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4028 fprintf (file
, ", base: ");
4029 print_generic_expr (file
, adj
->base
, 0);
4033 fprintf (file
, ", new_decl: ");
4034 print_generic_expr (file
, adj
->new_decl
, 0);
4036 if (adj
->new_ssa_base
)
4038 fprintf (file
, ", new_ssa_base: ");
4039 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4042 if (adj
->op
== IPA_PARM_OP_COPY
)
4043 fprintf (file
, ", copy_param");
4044 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4045 fprintf (file
, ", remove_param");
4047 fprintf (file
, ", offset %li", (long) adj
->offset
);
4049 fprintf (file
, ", by_ref");
4050 print_node_brief (file
, ", type: ", adj
->type
, 0);
4051 fprintf (file
, "\n");
4056 /* Dump the AV linked list. */
4059 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4062 fprintf (f
, " Aggregate replacements:");
4063 for (; av
; av
= av
->next
)
4065 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4066 av
->index
, av
->offset
);
4067 print_generic_expr (f
, av
->value
, 0);
4073 /* Stream out jump function JUMP_FUNC to OB. */
4076 ipa_write_jump_function (struct output_block
*ob
,
4077 struct ipa_jump_func
*jump_func
)
4079 struct ipa_agg_jf_item
*item
;
4080 struct bitpack_d bp
;
4083 streamer_write_uhwi (ob
, jump_func
->type
);
4084 switch (jump_func
->type
)
4086 case IPA_JF_UNKNOWN
:
4088 case IPA_JF_KNOWN_TYPE
:
4089 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
4090 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
4091 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
4095 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4096 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4098 case IPA_JF_PASS_THROUGH
:
4099 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4100 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4102 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4103 bp
= bitpack_create (ob
->main_stream
);
4104 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4105 bp_pack_value (&bp
, jump_func
->value
.pass_through
.type_preserved
, 1);
4106 streamer_write_bitpack (&bp
);
4110 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4111 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4114 case IPA_JF_ANCESTOR
:
4115 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4116 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
4117 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4118 bp
= bitpack_create (ob
->main_stream
);
4119 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4120 bp_pack_value (&bp
, jump_func
->value
.ancestor
.type_preserved
, 1);
4121 streamer_write_bitpack (&bp
);
4125 count
= vec_safe_length (jump_func
->agg
.items
);
4126 streamer_write_uhwi (ob
, count
);
4129 bp
= bitpack_create (ob
->main_stream
);
4130 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4131 streamer_write_bitpack (&bp
);
4134 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4136 streamer_write_uhwi (ob
, item
->offset
);
4137 stream_write_tree (ob
, item
->value
, true);
4141 /* Read in jump function JUMP_FUNC from IB. */
4144 ipa_read_jump_function (struct lto_input_block
*ib
,
4145 struct ipa_jump_func
*jump_func
,
4146 struct cgraph_edge
*cs
,
4147 struct data_in
*data_in
)
4149 enum jump_func_type jftype
;
4150 enum tree_code operation
;
4153 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4156 case IPA_JF_UNKNOWN
:
4157 jump_func
->type
= IPA_JF_UNKNOWN
;
4159 case IPA_JF_KNOWN_TYPE
:
4161 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4162 tree base_type
= stream_read_tree (ib
, data_in
);
4163 tree component_type
= stream_read_tree (ib
, data_in
);
4165 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
4169 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4171 case IPA_JF_PASS_THROUGH
:
4172 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4173 if (operation
== NOP_EXPR
)
4175 int formal_id
= streamer_read_uhwi (ib
);
4176 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4177 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4178 bool type_preserved
= bp_unpack_value (&bp
, 1);
4179 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
,
4184 tree operand
= stream_read_tree (ib
, data_in
);
4185 int formal_id
= streamer_read_uhwi (ib
);
4186 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4190 case IPA_JF_ANCESTOR
:
4192 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4193 tree type
= stream_read_tree (ib
, data_in
);
4194 int formal_id
= streamer_read_uhwi (ib
);
4195 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4196 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4197 bool type_preserved
= bp_unpack_value (&bp
, 1);
4199 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
,
4205 count
= streamer_read_uhwi (ib
);
4206 vec_alloc (jump_func
->agg
.items
, count
);
4209 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4210 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4212 for (i
= 0; i
< count
; i
++)
4214 struct ipa_agg_jf_item item
;
4215 item
.offset
= streamer_read_uhwi (ib
);
4216 item
.value
= stream_read_tree (ib
, data_in
);
4217 jump_func
->agg
.items
->quick_push (item
);
4221 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4222 relevant to indirect inlining to OB. */
4225 ipa_write_indirect_edge_info (struct output_block
*ob
,
4226 struct cgraph_edge
*cs
)
4228 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4229 struct bitpack_d bp
;
4231 streamer_write_hwi (ob
, ii
->param_index
);
4232 streamer_write_hwi (ob
, ii
->offset
);
4233 bp
= bitpack_create (ob
->main_stream
);
4234 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4235 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4236 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4237 bp_pack_value (&bp
, ii
->by_ref
, 1);
4238 bp_pack_value (&bp
, ii
->maybe_in_construction
, 1);
4239 bp_pack_value (&bp
, ii
->maybe_derived_type
, 1);
4240 streamer_write_bitpack (&bp
);
4242 if (ii
->polymorphic
)
4244 streamer_write_hwi (ob
, ii
->otr_token
);
4245 stream_write_tree (ob
, ii
->otr_type
, true);
4246 stream_write_tree (ob
, ii
->outer_type
, true);
4250 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4251 relevant to indirect inlining from IB. */
4254 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4255 struct data_in
*data_in ATTRIBUTE_UNUSED
,
4256 struct cgraph_edge
*cs
)
4258 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4259 struct bitpack_d bp
;
4261 ii
->param_index
= (int) streamer_read_hwi (ib
);
4262 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4263 bp
= streamer_read_bitpack (ib
);
4264 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4265 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4266 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4267 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4268 ii
->maybe_in_construction
= bp_unpack_value (&bp
, 1);
4269 ii
->maybe_derived_type
= bp_unpack_value (&bp
, 1);
4270 if (ii
->polymorphic
)
4272 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4273 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4274 ii
->outer_type
= stream_read_tree (ib
, data_in
);
4278 /* Stream out NODE info to OB. */
4281 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4284 lto_symtab_encoder_t encoder
;
4285 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4287 struct cgraph_edge
*e
;
4288 struct bitpack_d bp
;
4290 encoder
= ob
->decl_state
->symtab_node_encoder
;
4291 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4292 streamer_write_uhwi (ob
, node_ref
);
4294 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4295 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4296 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4297 bp
= bitpack_create (ob
->main_stream
);
4298 gcc_assert (info
->uses_analysis_done
4299 || ipa_get_param_count (info
) == 0);
4300 gcc_assert (!info
->node_enqueued
);
4301 gcc_assert (!info
->ipcp_orig_node
);
4302 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4303 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4304 streamer_write_bitpack (&bp
);
4305 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4306 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4307 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4309 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4311 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4312 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4313 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4315 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4317 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4319 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4320 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4321 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4322 ipa_write_indirect_edge_info (ob
, e
);
4326 /* Stream in NODE info from IB. */
4329 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4330 struct data_in
*data_in
)
4332 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4334 struct cgraph_edge
*e
;
4335 struct bitpack_d bp
;
4337 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4339 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4340 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
4342 bp
= streamer_read_bitpack (ib
);
4343 if (ipa_get_param_count (info
) != 0)
4344 info
->uses_analysis_done
= true;
4345 info
->node_enqueued
= false;
4346 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4347 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4348 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4349 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
4350 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4352 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4353 int count
= streamer_read_uhwi (ib
);
4357 vec_safe_grow_cleared (args
->jump_functions
, count
);
4359 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4360 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4363 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4365 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4366 int count
= streamer_read_uhwi (ib
);
4370 vec_safe_grow_cleared (args
->jump_functions
, count
);
4371 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4372 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4375 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4379 /* Write jump functions for nodes in SET. */
4382 ipa_prop_write_jump_functions (void)
4384 struct cgraph_node
*node
;
4385 struct output_block
*ob
;
4386 unsigned int count
= 0;
4387 lto_symtab_encoder_iterator lsei
;
4388 lto_symtab_encoder_t encoder
;
4391 if (!ipa_node_params_vector
.exists ())
4394 ob
= create_output_block (LTO_section_jump_functions
);
4395 encoder
= ob
->decl_state
->symtab_node_encoder
;
4396 ob
->cgraph_node
= NULL
;
4397 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4398 lsei_next_function_in_partition (&lsei
))
4400 node
= lsei_cgraph_node (lsei
);
4401 if (cgraph_function_with_gimple_body_p (node
)
4402 && IPA_NODE_REF (node
) != NULL
)
4406 streamer_write_uhwi (ob
, count
);
4408 /* Process all of the functions. */
4409 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4410 lsei_next_function_in_partition (&lsei
))
4412 node
= lsei_cgraph_node (lsei
);
4413 if (cgraph_function_with_gimple_body_p (node
)
4414 && IPA_NODE_REF (node
) != NULL
)
4415 ipa_write_node_info (ob
, node
);
4417 streamer_write_char_stream (ob
->main_stream
, 0);
4418 produce_asm (ob
, NULL
);
4419 destroy_output_block (ob
);
4422 /* Read section in file FILE_DATA of length LEN with data DATA. */
4425 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4428 const struct lto_function_header
*header
=
4429 (const struct lto_function_header
*) data
;
4430 const int cfg_offset
= sizeof (struct lto_function_header
);
4431 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4432 const int string_offset
= main_offset
+ header
->main_size
;
4433 struct data_in
*data_in
;
4434 struct lto_input_block ib_main
;
4438 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4442 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4443 header
->string_size
, vNULL
);
4444 count
= streamer_read_uhwi (&ib_main
);
4446 for (i
= 0; i
< count
; i
++)
4449 struct cgraph_node
*node
;
4450 lto_symtab_encoder_t encoder
;
4452 index
= streamer_read_uhwi (&ib_main
);
4453 encoder
= file_data
->symtab_node_encoder
;
4454 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4455 gcc_assert (node
->definition
);
4456 ipa_read_node_info (&ib_main
, node
, data_in
);
4458 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4460 lto_data_in_delete (data_in
);
4463 /* Read ipcp jump functions. */
4466 ipa_prop_read_jump_functions (void)
4468 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4469 struct lto_file_decl_data
*file_data
;
4472 ipa_check_create_node_params ();
4473 ipa_check_create_edge_args ();
4474 ipa_register_cgraph_hooks ();
4476 while ((file_data
= file_data_vec
[j
++]))
4479 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4482 ipa_prop_read_section (file_data
, data
, len
);
4486 /* After merging units, we can get mismatch in argument counts.
4487 Also decl merging might've rendered parameter lists obsolete.
4488 Also compute called_with_variable_arg info. */
4491 ipa_update_after_lto_read (void)
4493 ipa_check_create_node_params ();
4494 ipa_check_create_edge_args ();
4498 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4501 unsigned int count
= 0;
4502 lto_symtab_encoder_t encoder
;
4503 struct ipa_agg_replacement_value
*aggvals
, *av
;
4505 aggvals
= ipa_get_agg_replacements_for_node (node
);
4506 encoder
= ob
->decl_state
->symtab_node_encoder
;
4507 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4508 streamer_write_uhwi (ob
, node_ref
);
4510 for (av
= aggvals
; av
; av
= av
->next
)
4512 streamer_write_uhwi (ob
, count
);
4514 for (av
= aggvals
; av
; av
= av
->next
)
4516 struct bitpack_d bp
;
4518 streamer_write_uhwi (ob
, av
->offset
);
4519 streamer_write_uhwi (ob
, av
->index
);
4520 stream_write_tree (ob
, av
->value
, true);
4522 bp
= bitpack_create (ob
->main_stream
);
4523 bp_pack_value (&bp
, av
->by_ref
, 1);
4524 streamer_write_bitpack (&bp
);
4528 /* Stream in the aggregate value replacement chain for NODE from IB. */
4531 read_agg_replacement_chain (struct lto_input_block
*ib
,
4532 struct cgraph_node
*node
,
4533 struct data_in
*data_in
)
4535 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4536 unsigned int count
, i
;
4538 count
= streamer_read_uhwi (ib
);
4539 for (i
= 0; i
<count
; i
++)
4541 struct ipa_agg_replacement_value
*av
;
4542 struct bitpack_d bp
;
4544 av
= ggc_alloc_ipa_agg_replacement_value ();
4545 av
->offset
= streamer_read_uhwi (ib
);
4546 av
->index
= streamer_read_uhwi (ib
);
4547 av
->value
= stream_read_tree (ib
, data_in
);
4548 bp
= streamer_read_bitpack (ib
);
4549 av
->by_ref
= bp_unpack_value (&bp
, 1);
4553 ipa_set_node_agg_value_chain (node
, aggvals
);
4556 /* Write all aggregate replacement for nodes in set. */
4559 ipa_prop_write_all_agg_replacement (void)
4561 struct cgraph_node
*node
;
4562 struct output_block
*ob
;
4563 unsigned int count
= 0;
4564 lto_symtab_encoder_iterator lsei
;
4565 lto_symtab_encoder_t encoder
;
4567 if (!ipa_node_agg_replacements
)
4570 ob
= create_output_block (LTO_section_ipcp_transform
);
4571 encoder
= ob
->decl_state
->symtab_node_encoder
;
4572 ob
->cgraph_node
= NULL
;
4573 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4574 lsei_next_function_in_partition (&lsei
))
4576 node
= lsei_cgraph_node (lsei
);
4577 if (cgraph_function_with_gimple_body_p (node
)
4578 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4582 streamer_write_uhwi (ob
, count
);
4584 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4585 lsei_next_function_in_partition (&lsei
))
4587 node
= lsei_cgraph_node (lsei
);
4588 if (cgraph_function_with_gimple_body_p (node
)
4589 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4590 write_agg_replacement_chain (ob
, node
);
4592 streamer_write_char_stream (ob
->main_stream
, 0);
4593 produce_asm (ob
, NULL
);
4594 destroy_output_block (ob
);
4597 /* Read replacements section in file FILE_DATA of length LEN with data
4601 read_replacements_section (struct lto_file_decl_data
*file_data
,
4605 const struct lto_function_header
*header
=
4606 (const struct lto_function_header
*) data
;
4607 const int cfg_offset
= sizeof (struct lto_function_header
);
4608 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4609 const int string_offset
= main_offset
+ header
->main_size
;
4610 struct data_in
*data_in
;
4611 struct lto_input_block ib_main
;
4615 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4618 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4619 header
->string_size
, vNULL
);
4620 count
= streamer_read_uhwi (&ib_main
);
4622 for (i
= 0; i
< count
; i
++)
4625 struct cgraph_node
*node
;
4626 lto_symtab_encoder_t encoder
;
4628 index
= streamer_read_uhwi (&ib_main
);
4629 encoder
= file_data
->symtab_node_encoder
;
4630 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4631 gcc_assert (node
->definition
);
4632 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4634 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4636 lto_data_in_delete (data_in
);
4639 /* Read IPA-CP aggregate replacements. */
4642 ipa_prop_read_all_agg_replacement (void)
4644 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4645 struct lto_file_decl_data
*file_data
;
4648 while ((file_data
= file_data_vec
[j
++]))
4651 const char *data
= lto_get_section_data (file_data
,
4652 LTO_section_ipcp_transform
,
4655 read_replacements_section (file_data
, data
, len
);
4659 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4663 adjust_agg_replacement_values (struct cgraph_node
*node
,
4664 struct ipa_agg_replacement_value
*aggval
)
4666 struct ipa_agg_replacement_value
*v
;
4667 int i
, c
= 0, d
= 0, *adj
;
4669 if (!node
->clone
.combined_args_to_skip
)
4672 for (v
= aggval
; v
; v
= v
->next
)
4674 gcc_assert (v
->index
>= 0);
4680 adj
= XALLOCAVEC (int, c
);
4681 for (i
= 0; i
< c
; i
++)
4682 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
4690 for (v
= aggval
; v
; v
= v
->next
)
4691 v
->index
= adj
[v
->index
];
4695 /* Function body transformation phase. */
4698 ipcp_transform_function (struct cgraph_node
*node
)
4700 vec
<ipa_param_descriptor
> descriptors
= vNULL
;
4701 struct param_analysis_info
*parms_ainfo
;
4702 struct ipa_agg_replacement_value
*aggval
;
4703 gimple_stmt_iterator gsi
;
4706 bool cfg_changed
= false, something_changed
= false;
4708 gcc_checking_assert (cfun
);
4709 gcc_checking_assert (current_function_decl
);
4712 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4713 node
->name (), node
->order
);
4715 aggval
= ipa_get_agg_replacements_for_node (node
);
4718 param_count
= count_formal_params (node
->decl
);
4719 if (param_count
== 0)
4721 adjust_agg_replacement_values (node
, aggval
);
4723 ipa_dump_agg_replacement_values (dump_file
, aggval
);
4724 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4725 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4726 descriptors
.safe_grow_cleared (param_count
);
4727 ipa_populate_param_decls (node
, descriptors
);
4729 FOR_EACH_BB_FN (bb
, cfun
)
4730 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4732 struct ipa_agg_replacement_value
*v
;
4733 gimple stmt
= gsi_stmt (gsi
);
4735 HOST_WIDE_INT offset
, size
;
4739 if (!gimple_assign_load_p (stmt
))
4741 rhs
= gimple_assign_rhs1 (stmt
);
4742 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
4747 while (handled_component_p (t
))
4749 /* V_C_E can do things like convert an array of integers to one
4750 bigger integer and similar things we do not handle below. */
4751 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4756 t
= TREE_OPERAND (t
, 0);
4761 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4762 rhs
, &index
, &offset
, &size
, &by_ref
))
4764 for (v
= aggval
; v
; v
= v
->next
)
4765 if (v
->index
== index
4766 && v
->offset
== offset
)
4769 || v
->by_ref
!= by_ref
4770 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
4773 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
4774 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4776 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4777 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4778 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4779 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4780 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4785 fprintf (dump_file
, " const ");
4786 print_generic_expr (dump_file
, v
->value
, 0);
4787 fprintf (dump_file
, " can't be converted to type of ");
4788 print_generic_expr (dump_file
, rhs
, 0);
4789 fprintf (dump_file
, "\n");
4797 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4799 fprintf (dump_file
, "Modifying stmt:\n ");
4800 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4802 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4805 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4807 fprintf (dump_file
, "into:\n ");
4808 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4809 fprintf (dump_file
, "\n");
4812 something_changed
= true;
4813 if (maybe_clean_eh_stmt (stmt
)
4814 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
4818 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4819 free_parms_ainfo (parms_ainfo
, param_count
);
4820 descriptors
.release ();
4822 if (!something_changed
)
4824 else if (cfg_changed
)
4825 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4827 return TODO_update_ssa_only_virtuals
;