1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
24 #include "basic-block.h"
25 #include "tree-ssa-alias.h"
26 #include "internal-fn.h"
27 #include "gimple-fold.h"
29 #include "gimple-expr.h"
33 #include "stor-layout.h"
34 #include "print-tree.h"
36 #include "gimple-iterator.h"
37 #include "gimplify-me.h"
38 #include "gimple-walk.h"
39 #include "langhooks.h"
43 #include "gimple-ssa.h"
45 #include "tree-phinodes.h"
46 #include "ssa-iterators.h"
47 #include "tree-into-ssa.h"
49 #include "tree-pass.h"
50 #include "tree-inline.h"
51 #include "ipa-inline.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "lto-streamer.h"
56 #include "data-streamer.h"
57 #include "tree-streamer.h"
59 #include "ipa-utils.h"
61 /* Intermediate information about a parameter that is only useful during the
62 run of ipa_analyze_node and is not kept afterwards. */
64 struct param_analysis_info
66 bool parm_modified
, ref_modified
, pt_modified
;
67 bitmap parm_visited_statements
, pt_visited_statements
;
70 /* Vector where the parameter infos are actually stored. */
71 vec
<ipa_node_params
> ipa_node_params_vector
;
72 /* Vector of known aggregate values in cloned nodes. */
73 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
74 /* Vector where the parameter infos are actually stored. */
75 vec
<ipa_edge_args
, va_gc
> *ipa_edge_args_vector
;
/* Holders of ipa cgraph hooks, used to unregister them on exit: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
84 /* Description of a reference to an IPA constant. */
85 struct ipa_cst_ref_desc
87 /* Edge that corresponds to the statement which took the reference. */
88 struct cgraph_edge
*cs
;
89 /* Linked list of duplicates created when call graph edges are cloned. */
90 struct ipa_cst_ref_desc
*next_duplicate
;
91 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
96 /* Allocation pool for reference descriptions. */
98 static alloc_pool ipa_refdesc_pool
;
100 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
101 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
104 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
106 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->decl
);
107 struct cl_optimization
*os
;
111 os
= TREE_OPTIMIZATION (fs_opts
);
112 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
115 /* Return index of the formal whose tree is PTREE in function which corresponds
119 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor
> descriptors
, tree ptree
)
123 count
= descriptors
.length ();
124 for (i
= 0; i
< count
; i
++)
125 if (descriptors
[i
].decl
== ptree
)
131 /* Return index of the formal whose tree is PTREE in function which corresponds
135 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
137 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
140 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
144 ipa_populate_param_decls (struct cgraph_node
*node
,
145 vec
<ipa_param_descriptor
> &descriptors
)
153 gcc_assert (gimple_has_body_p (fndecl
));
154 fnargs
= DECL_ARGUMENTS (fndecl
);
156 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
158 descriptors
[param_num
].decl
= parm
;
159 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
164 /* Return how many formal parameters FNDECL has. */
167 count_formal_params (tree fndecl
)
171 gcc_assert (gimple_has_body_p (fndecl
));
173 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
179 /* Return the declaration of Ith formal parameter of the function corresponding
180 to INFO. Note there is no setter function as this array is built just once
181 using ipa_initialize_node_params. */
184 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
186 fprintf (file
, "param #%i", i
);
187 if (info
->descriptors
[i
].decl
)
190 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
194 /* Initialize the ipa_node_params structure associated with NODE
195 to hold PARAM_COUNT parameters. */
198 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
200 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
202 if (!info
->descriptors
.exists () && param_count
)
203 info
->descriptors
.safe_grow_cleared (param_count
);
206 /* Initialize the ipa_node_params structure associated with NODE by counting
207 the function parameters, creating the descriptors and populating their
211 ipa_initialize_node_params (struct cgraph_node
*node
)
213 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
215 if (!info
->descriptors
.exists ())
217 ipa_alloc_node_params (node
, count_formal_params (node
->decl
));
218 ipa_populate_param_decls (node
, info
->descriptors
);
222 /* Print the jump functions associated with call graph edge CS to file F. */
225 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
229 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
230 for (i
= 0; i
< count
; i
++)
232 struct ipa_jump_func
*jump_func
;
233 enum jump_func_type type
;
235 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
236 type
= jump_func
->type
;
238 fprintf (f
, " param %d: ", i
);
239 if (type
== IPA_JF_UNKNOWN
)
240 fprintf (f
, "UNKNOWN\n");
241 else if (type
== IPA_JF_KNOWN_TYPE
)
243 fprintf (f
, "KNOWN TYPE: base ");
244 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
245 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
246 jump_func
->value
.known_type
.offset
);
247 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
250 else if (type
== IPA_JF_CONST
)
252 tree val
= jump_func
->value
.constant
.value
;
253 fprintf (f
, "CONST: ");
254 print_generic_expr (f
, val
, 0);
255 if (TREE_CODE (val
) == ADDR_EXPR
256 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
259 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
264 else if (type
== IPA_JF_PASS_THROUGH
)
266 fprintf (f
, "PASS THROUGH: ");
267 fprintf (f
, "%d, op %s",
268 jump_func
->value
.pass_through
.formal_id
,
269 get_tree_code_name(jump_func
->value
.pass_through
.operation
));
270 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
273 print_generic_expr (f
,
274 jump_func
->value
.pass_through
.operand
, 0);
276 if (jump_func
->value
.pass_through
.agg_preserved
)
277 fprintf (f
, ", agg_preserved");
278 if (jump_func
->value
.pass_through
.type_preserved
)
279 fprintf (f
, ", type_preserved");
282 else if (type
== IPA_JF_ANCESTOR
)
284 fprintf (f
, "ANCESTOR: ");
285 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
286 jump_func
->value
.ancestor
.formal_id
,
287 jump_func
->value
.ancestor
.offset
);
288 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
289 if (jump_func
->value
.ancestor
.agg_preserved
)
290 fprintf (f
, ", agg_preserved");
291 if (jump_func
->value
.ancestor
.type_preserved
)
292 fprintf (f
, ", type_preserved");
296 if (jump_func
->agg
.items
)
298 struct ipa_agg_jf_item
*item
;
301 fprintf (f
, " Aggregate passed by %s:\n",
302 jump_func
->agg
.by_ref
? "reference" : "value");
303 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
305 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
307 if (TYPE_P (item
->value
))
308 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
309 tree_to_uhwi (TYPE_SIZE (item
->value
)));
312 fprintf (f
, "cst: ");
313 print_generic_expr (f
, item
->value
, 0);
322 /* Print the jump functions of all arguments on all call graph edges going from
326 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
328 struct cgraph_edge
*cs
;
330 fprintf (f
, " Jump functions of caller %s/%i:\n", node
->name (),
332 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
334 if (!ipa_edge_args_info_available_for_edge_p (cs
))
337 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
338 xstrdup (node
->name ()), node
->order
,
339 xstrdup (cs
->callee
->name ()),
341 ipa_print_node_jump_functions_for_edge (f
, cs
);
344 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
346 struct cgraph_indirect_call_info
*ii
;
347 if (!ipa_edge_args_info_available_for_edge_p (cs
))
350 ii
= cs
->indirect_info
;
351 if (ii
->agg_contents
)
352 fprintf (f
, " indirect %s callsite, calling param %i, "
353 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
354 ii
->member_ptr
? "member ptr" : "aggregate",
355 ii
->param_index
, ii
->offset
,
356 ii
->by_ref
? "by reference" : "by_value");
358 fprintf (f
, " indirect %s callsite, calling param %i",
359 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
);
363 fprintf (f
, ", for stmt ");
364 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
368 ipa_print_node_jump_functions_for_edge (f
, cs
);
372 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
375 ipa_print_all_jump_functions (FILE *f
)
377 struct cgraph_node
*node
;
379 fprintf (f
, "\nJump functions:\n");
380 FOR_EACH_FUNCTION (node
)
382 ipa_print_node_jump_functions (f
, node
);
386 /* Set JFUNC to be a known type jump function. */
389 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
390 tree base_type
, tree component_type
)
392 gcc_assert (TREE_CODE (component_type
) == RECORD_TYPE
393 && TYPE_BINFO (component_type
));
394 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
395 jfunc
->value
.known_type
.offset
= offset
,
396 jfunc
->value
.known_type
.base_type
= base_type
;
397 jfunc
->value
.known_type
.component_type
= component_type
;
398 gcc_assert (component_type
);
401 /* Set JFUNC to be a copy of another jmp (to be used by jump function
402 combination code). The two functions will share their rdesc. */
405 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
406 struct ipa_jump_func
*src
)
409 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
410 dst
->type
= IPA_JF_CONST
;
411 dst
->value
.constant
= src
->value
.constant
;
414 /* Set JFUNC to be a constant jmp function. */
417 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
418 struct cgraph_edge
*cs
)
420 constant
= unshare_expr (constant
);
421 if (constant
&& EXPR_P (constant
))
422 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
423 jfunc
->type
= IPA_JF_CONST
;
424 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
426 if (TREE_CODE (constant
) == ADDR_EXPR
427 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
429 struct ipa_cst_ref_desc
*rdesc
;
430 if (!ipa_refdesc_pool
)
431 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
432 sizeof (struct ipa_cst_ref_desc
), 32);
434 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
436 rdesc
->next_duplicate
= NULL
;
438 jfunc
->value
.constant
.rdesc
= rdesc
;
441 jfunc
->value
.constant
.rdesc
= NULL
;
444 /* Set JFUNC to be a simple pass-through jump function. */
446 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
447 bool agg_preserved
, bool type_preserved
)
449 jfunc
->type
= IPA_JF_PASS_THROUGH
;
450 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
451 jfunc
->value
.pass_through
.formal_id
= formal_id
;
452 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
453 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
454 jfunc
->value
.pass_through
.type_preserved
= type_preserved
;
457 /* Set JFUNC to be an arithmetic pass through jump function. */
460 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
461 tree operand
, enum tree_code operation
)
463 jfunc
->type
= IPA_JF_PASS_THROUGH
;
464 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
465 jfunc
->value
.pass_through
.formal_id
= formal_id
;
466 jfunc
->value
.pass_through
.operation
= operation
;
467 jfunc
->value
.pass_through
.agg_preserved
= false;
468 jfunc
->value
.pass_through
.type_preserved
= false;
471 /* Set JFUNC to be an ancestor jump function. */
474 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
475 tree type
, int formal_id
, bool agg_preserved
,
478 jfunc
->type
= IPA_JF_ANCESTOR
;
479 jfunc
->value
.ancestor
.formal_id
= formal_id
;
480 jfunc
->value
.ancestor
.offset
= offset
;
481 jfunc
->value
.ancestor
.type
= type
;
482 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
483 jfunc
->value
.ancestor
.type_preserved
= type_preserved
;
486 /* Extract the acual BINFO being described by JFUNC which must be a known type
490 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
492 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
495 return get_binfo_at_offset (base_binfo
,
496 jfunc
->value
.known_type
.offset
,
497 jfunc
->value
.known_type
.component_type
);
500 /* Structure to be passed in between detect_type_change and
501 check_stmt_for_type_change. */
503 struct type_change_info
505 /* Offset into the object where there is the virtual method pointer we are
507 HOST_WIDE_INT offset
;
508 /* The declaration or SSA_NAME pointer of the base that we are checking for
511 /* If we actually can tell the type that the object has changed to, it is
512 stored in this field. Otherwise it remains NULL_TREE. */
513 tree known_current_type
;
514 /* Set to true if dynamic type change has been detected. */
515 bool type_maybe_changed
;
516 /* Set to true if multiple types have been encountered. known_current_type
517 must be disregarded in that case. */
518 bool multiple_types_encountered
;
521 /* Return true if STMT can modify a virtual method table pointer.
523 This function makes special assumptions about both constructors and
524 destructors which are all the functions that are allowed to alter the VMT
525 pointers. It assumes that destructors begin with assignment into all VMT
526 pointers and that constructors essentially look in the following way:
528 1) The very first thing they do is that they call constructors of ancestor
529 sub-objects that have them.
531 2) Then VMT pointers of this and all its ancestors is set to new values
532 corresponding to the type corresponding to the constructor.
534 3) Only afterwards, other stuff such as constructor of member sub-objects
535 and the code written by the user is run. Only this may include calling
536 virtual functions, directly or indirectly.
538 There is no way to call a constructor of an ancestor sub-object in any
541 This means that we do not have to care whether constructors get the correct
542 type information because they will always change it (in fact, if we define
543 the type to be given by the VMT pointer, it is undefined).
545 The most important fact to derive from the above is that if, for some
546 statement in the section 3, we try to detect whether the dynamic type has
547 changed, we can safely ignore all calls as we examine the function body
548 backwards until we reach statements in section 2 because these calls cannot
549 be ancestor constructors or destructors (if the input is not bogus) and so
550 do not change the dynamic type (this holds true only for automatically
551 allocated objects but at the moment we devirtualize only these). We then
552 must detect that statements in section 2 change the dynamic type and can try
553 to derive the new type. That is enough and we can stop, we will never see
554 the calls into constructors of sub-objects in this code. Therefore we can
555 safely ignore all call statements that we traverse.
559 stmt_may_be_vtbl_ptr_store (gimple stmt
)
561 if (is_gimple_call (stmt
))
563 else if (gimple_clobber_p (stmt
))
565 else if (is_gimple_assign (stmt
))
567 tree lhs
= gimple_assign_lhs (stmt
);
569 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
571 if (flag_strict_aliasing
572 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
575 if (TREE_CODE (lhs
) == COMPONENT_REF
576 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
578 /* In the future we might want to use get_base_ref_and_offset to find
579 if there is a field corresponding to the offset and if so, proceed
580 almost like if it was a component ref. */
586 /* If STMT can be proved to be an assignment to the virtual method table
587 pointer of ANALYZED_OBJ and the type associated with the new table
588 identified, return the type. Otherwise return NULL_TREE. */
591 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
593 HOST_WIDE_INT offset
, size
, max_size
;
596 if (!gimple_assign_single_p (stmt
))
599 lhs
= gimple_assign_lhs (stmt
);
600 rhs
= gimple_assign_rhs1 (stmt
);
601 if (TREE_CODE (lhs
) != COMPONENT_REF
602 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1))
603 || TREE_CODE (rhs
) != ADDR_EXPR
)
605 rhs
= get_base_address (TREE_OPERAND (rhs
, 0));
607 || TREE_CODE (rhs
) != VAR_DECL
608 || !DECL_VIRTUAL_P (rhs
))
611 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
612 if (offset
!= tci
->offset
613 || size
!= POINTER_SIZE
614 || max_size
!= POINTER_SIZE
)
616 if (TREE_CODE (base
) == MEM_REF
)
618 if (TREE_CODE (tci
->object
) != MEM_REF
619 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
620 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
621 TREE_OPERAND (base
, 1)))
624 else if (tci
->object
!= base
)
627 return DECL_CONTEXT (rhs
);
630 /* Callback of walk_aliased_vdefs and a helper function for
631 detect_type_change to check whether a particular statement may modify
632 the virtual table pointer, and if possible also determine the new type of
633 the (sub-)object. It stores its result into DATA, which points to a
634 type_change_info structure. */
637 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
639 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
640 struct type_change_info
*tci
= (struct type_change_info
*) data
;
642 if (stmt_may_be_vtbl_ptr_store (stmt
))
645 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
646 if (tci
->type_maybe_changed
647 && type
!= tci
->known_current_type
)
648 tci
->multiple_types_encountered
= true;
649 tci
->known_current_type
= type
;
650 tci
->type_maybe_changed
= true;
659 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
660 callsite CALL) by looking for assignments to its virtual table pointer. If
661 it is, return true and fill in the jump function JFUNC with relevant type
662 information or set it to unknown. ARG is the object itself (not a pointer
663 to it, unless dereferenced). BASE is the base of the memory access as
664 returned by get_ref_base_and_extent, as is the offset. */
667 detect_type_change (tree arg
, tree base
, tree comp_type
, gimple call
,
668 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
670 struct type_change_info tci
;
673 gcc_checking_assert (DECL_P (arg
)
674 || TREE_CODE (arg
) == MEM_REF
675 || handled_component_p (arg
));
676 /* Const calls cannot call virtual methods through VMT and so type changes do
678 if (!flag_devirtualize
|| !gimple_vuse (call
)
679 /* Be sure expected_type is polymorphic. */
681 || TREE_CODE (comp_type
) != RECORD_TYPE
682 || !TYPE_BINFO (comp_type
)
683 || !BINFO_VTABLE (TYPE_BINFO (comp_type
)))
686 ao_ref_init (&ao
, arg
);
689 ao
.size
= POINTER_SIZE
;
690 ao
.max_size
= ao
.size
;
693 tci
.object
= get_base_address (arg
);
694 tci
.known_current_type
= NULL_TREE
;
695 tci
.type_maybe_changed
= false;
696 tci
.multiple_types_encountered
= false;
698 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
700 if (!tci
.type_maybe_changed
)
703 if (!tci
.known_current_type
704 || tci
.multiple_types_encountered
706 jfunc
->type
= IPA_JF_UNKNOWN
;
708 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
713 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
714 SSA name (its dereference will become the base and the offset is assumed to
718 detect_type_change_ssa (tree arg
, tree comp_type
,
719 gimple call
, struct ipa_jump_func
*jfunc
)
721 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
722 if (!flag_devirtualize
723 || !POINTER_TYPE_P (TREE_TYPE (arg
)))
726 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
727 build_int_cst (ptr_type_node
, 0));
729 return detect_type_change (arg
, arg
, comp_type
, call
, jfunc
, 0);
732 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
733 boolean variable pointed to by DATA. */
736 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
739 bool *b
= (bool *) data
;
744 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
745 a value known not to be modified in this function before reaching the
746 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
747 information about the parameter. */
750 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
751 gimple stmt
, tree parm_load
)
753 bool modified
= false;
754 bitmap
*visited_stmts
;
757 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
760 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
761 ao_ref_init (&refd
, parm_load
);
762 /* We can cache visited statements only when parm_ainfo is available and when
763 we are looking at a naked load of the whole parameter. */
764 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
765 visited_stmts
= NULL
;
767 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
768 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
770 if (parm_ainfo
&& modified
)
771 parm_ainfo
->parm_modified
= true;
775 /* If STMT is an assignment that loads a value from an parameter declaration,
776 return the index of the parameter in ipa_node_params which has not been
777 modified. Otherwise return -1. */
780 load_from_unmodified_param (vec
<ipa_param_descriptor
> descriptors
,
781 struct param_analysis_info
*parms_ainfo
,
787 if (!gimple_assign_single_p (stmt
))
790 op1
= gimple_assign_rhs1 (stmt
);
791 if (TREE_CODE (op1
) != PARM_DECL
)
794 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
796 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
803 /* Return true if memory reference REF loads data that are known to be
804 unmodified in this function before reaching statement STMT. PARM_AINFO, if
805 non-NULL, is a pointer to a structure containing temporary information about
809 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
810 gimple stmt
, tree ref
)
812 bool modified
= false;
815 gcc_checking_assert (gimple_vuse (stmt
));
816 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
819 ao_ref_init (&refd
, ref
);
820 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
822 if (parm_ainfo
&& modified
)
823 parm_ainfo
->ref_modified
= true;
827 /* Return true if the data pointed to by PARM is known to be unmodified in this
828 function before reaching call statement CALL into which it is passed.
829 PARM_AINFO is a pointer to a structure containing temporary information
833 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
834 gimple call
, tree parm
)
836 bool modified
= false;
839 /* It's unnecessary to calculate anything about memory contnets for a const
840 function because it is not goin to use it. But do not cache the result
841 either. Also, no such calculations for non-pointers. */
842 if (!gimple_vuse (call
)
843 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
846 if (parm_ainfo
->pt_modified
)
849 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
850 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
851 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
853 parm_ainfo
->pt_modified
= true;
857 /* Return true if we can prove that OP is a memory reference loading unmodified
858 data from an aggregate passed as a parameter and if the aggregate is passed
859 by reference, that the alias type of the load corresponds to the type of the
860 formal parameter (so that we can rely on this type for TBAA in callers).
861 INFO and PARMS_AINFO describe parameters of the current function (but the
862 latter can be NULL), STMT is the load statement. If function returns true,
863 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
864 within the aggregate and whether it is a load from a value passed by
865 reference respectively. */
868 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor
> descriptors
,
869 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
870 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
871 HOST_WIDE_INT
*size_p
, bool *by_ref_p
)
874 HOST_WIDE_INT size
, max_size
;
875 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
877 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
882 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
884 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
896 if (TREE_CODE (base
) != MEM_REF
897 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
898 || !integer_zerop (TREE_OPERAND (base
, 1)))
901 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
903 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
904 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
908 /* This branch catches situations where a pointer parameter is not a
909 gimple register, for example:
911 void hip7(S*) (struct S * p)
913 void (*<T2e4>) (struct S *) D.1867;
923 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
924 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
928 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
940 /* Just like the previous function, just without the param_analysis_info
941 pointer, for users outside of this file. */
944 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
945 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
948 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
949 offset_p
, NULL
, by_ref_p
);
952 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
953 of an assignment statement STMT, try to determine whether we are actually
954 handling any of the following cases and construct an appropriate jump
955 function into JFUNC if so:
957 1) The passed value is loaded from a formal parameter which is not a gimple
958 register (most probably because it is addressable, the value has to be
959 scalar) and we can guarantee the value has not changed. This case can
960 therefore be described by a simple pass-through jump function. For example:
969 2) The passed value can be described by a simple arithmetic pass-through
976 D.2064_4 = a.1(D) + 4;
979 This case can also occur in combination of the previous one, e.g.:
987 D.2064_4 = a.0_3 + 4;
990 3) The passed value is an address of an object within another one (which
991 also passed by reference). Such situations are described by an ancestor
992 jump function and describe situations such as:
994 B::foo() (struct B * const this)
998 D.1845_2 = &this_1(D)->D.1748;
1001 INFO is the structure describing individual parameters access different
1002 stages of IPA optimizations. PARMS_AINFO contains the information that is
1003 only needed for intraprocedural analysis. */
1006 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
1007 struct param_analysis_info
*parms_ainfo
,
1008 struct ipa_jump_func
*jfunc
,
1009 gimple call
, gimple stmt
, tree name
,
1012 HOST_WIDE_INT offset
, size
, max_size
;
1013 tree op1
, tc_ssa
, base
, ssa
;
1016 op1
= gimple_assign_rhs1 (stmt
);
1018 if (TREE_CODE (op1
) == SSA_NAME
)
1020 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1021 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1023 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
1024 SSA_NAME_DEF_STMT (op1
));
1029 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
1030 tc_ssa
= gimple_assign_lhs (stmt
);
1035 tree op2
= gimple_assign_rhs2 (stmt
);
1039 if (!is_gimple_ip_invariant (op2
)
1040 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1041 && !useless_type_conversion_p (TREE_TYPE (name
),
1045 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1046 gimple_assign_rhs_code (stmt
));
1048 else if (gimple_assign_single_p (stmt
))
1050 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1052 bool type_p
= false;
1054 if (param_type
&& POINTER_TYPE_P (param_type
))
1055 type_p
= !detect_type_change_ssa (tc_ssa
, TREE_TYPE (param_type
),
1057 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1058 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
1063 if (TREE_CODE (op1
) != ADDR_EXPR
)
1065 op1
= TREE_OPERAND (op1
, 0);
1066 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1068 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1069 if (TREE_CODE (base
) != MEM_REF
1070 /* If this is a varying address, punt. */
1072 || max_size
!= size
)
1074 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1075 ssa
= TREE_OPERAND (base
, 0);
1076 if (TREE_CODE (ssa
) != SSA_NAME
1077 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1081 /* Dynamic types are changed in constructors and destructors. */
1082 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1083 if (index
>= 0 && param_type
&& POINTER_TYPE_P (param_type
))
1085 bool type_p
= !detect_type_change (op1
, base
, TREE_TYPE (param_type
),
1086 call
, jfunc
, offset
);
1087 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1088 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1089 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1090 call
, ssa
), type_p
);
1094 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1097 iftmp.1_3 = &obj_2(D)->D.1762;
1099 The base of the MEM_REF must be a default definition SSA NAME of a
1100 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1101 whole MEM_REF expression is returned and the offset calculated from any
1102 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1103 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1106 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1108 HOST_WIDE_INT size
, max_size
;
1109 tree expr
, parm
, obj
;
1111 if (!gimple_assign_single_p (assign
))
1113 expr
= gimple_assign_rhs1 (assign
);
1115 if (TREE_CODE (expr
) != ADDR_EXPR
)
1117 expr
= TREE_OPERAND (expr
, 0);
1119 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1121 if (TREE_CODE (expr
) != MEM_REF
1122 /* If this is a varying address, punt. */
1127 parm
= TREE_OPERAND (expr
, 0);
1128 if (TREE_CODE (parm
) != SSA_NAME
1129 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1130 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1133 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1139 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1140 statement PHI, try to find out whether NAME is in fact a
1141 multiple-inheritance typecast from a descendant into an ancestor of a formal
1142 parameter and thus can be described by an ancestor jump function and if so,
1143 write the appropriate function into JFUNC.
1145 Essentially we want to match the following pattern:
1153 iftmp.1_3 = &obj_2(D)->D.1762;
1156 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1157 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1161 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1162 struct param_analysis_info
*parms_ainfo
,
1163 struct ipa_jump_func
*jfunc
,
1164 gimple call
, gimple phi
, tree param_type
)
1166 HOST_WIDE_INT offset
;
1167 gimple assign
, cond
;
1168 basic_block phi_bb
, assign_bb
, cond_bb
;
1169 tree tmp
, parm
, expr
, obj
;
/* The guarded-cast pattern always produces exactly a two-argument PHI.  */
1172 if (gimple_phi_num_args (phi
) != 2)
/* One PHI argument must be the null constant, the other the casted
   pointer TMP.  */
1175 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1176 tmp
= PHI_ARG_DEF (phi
, 0);
1177 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1178 tmp
= PHI_ARG_DEF (phi
, 1);
/* TMP must be a statement-defined SSA pointer to a RECORD_TYPE.  */
1181 if (TREE_CODE (tmp
) != SSA_NAME
1182 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1183 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1184 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
/* The defining statement must be the &obj->field address computation.  */
1187 assign
= SSA_NAME_DEF_STMT (tmp
)
;
1188 assign_bb
= gimple_bb (assign
);
1189 if (!single_pred_p (assign_bb
))
1191 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1194 parm
= TREE_OPERAND (expr
, 0);
1195 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1196 gcc_assert (index
>= 0);
/* The predecessor must end with "if (parm != 0)" guarding the cast.  */
1198 cond_bb
= single_pred (assign_bb
);
1199 cond
= last_stmt (cond_bb
);
1201 || gimple_code (cond
) != GIMPLE_COND
1202 || gimple_cond_code (cond
) != NE_EXPR
1203 || gimple_cond_lhs (cond
) != parm
1204 || !integer_zerop (gimple_cond_rhs (cond
)))
/* The PHI block may only be reached from the assign and condition blocks
   for the diamond to match.  */
1207 phi_bb
= gimple_bb (phi
);
1208 for (i
= 0; i
< 2; i
++)
1210 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1211 if (pred
!= assign_bb
&& pred
!= cond_bb
)
/* Emit the ancestor jump function, recording whether the dynamic type is
   provably preserved (TYPE_P) when the parameter type is a pointer.  */
1215 bool type_p
= false;
1216 if (param_type
&& POINTER_TYPE_P (param_type
))
1217 type_p
= !detect_type_change (obj
, expr
, TREE_TYPE (param_type
),
1218 call
, jfunc
, offset
);
1219 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1220 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1221 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1222 call
, parm
), type_p
);
1225 /* Given OP which is passed as an actual argument to a called function,
1226 determine if it is possible to construct a KNOWN_TYPE jump function for it
1227 and if so, create one and store it to JFUNC.
1228 EXPECTED_TYPE represents a type the argument should be in */
1231 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1232 gimple call
, tree expected_type
)
1234 HOST_WIDE_INT offset
, size
, max_size
;
/* Punt unless devirtualization is enabled, OP is the address of a record
   and EXPECTED_TYPE is a polymorphic record (its BINFO has a vtable).  */
1237 if (!flag_devirtualize
1238 || TREE_CODE (op
) != ADDR_EXPR
1239 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
1240 /* Be sure expected_type is polymorphic. */
1242 || TREE_CODE (expected_type
) != RECORD_TYPE
1243 || !TYPE_BINFO (expected_type
)
1244 || !BINFO_VTABLE (TYPE_BINFO (expected_type
)))
/* Find the underlying object and its offset; globals are excluded since
   their dynamic type may change between this point and the call.  */
1247 op
= TREE_OPERAND (op
, 0);
1248 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1252 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1253 || is_global_var (base
))
/* If the dynamic type might have changed since construction, give up.  */
1256 if (detect_type_change (op
, base
, expected_type
, call
, jfunc
, offset
))
1259 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
),
1263 /* Inspect the given TYPE and return true iff it has the same structure (the
1264 same number of fields of the same types) as a C++ member pointer. If
1265 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1266 corresponding fields there. */
1269 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1273 if (TREE_CODE (type
) != RECORD_TYPE
)
1276 fld
= TYPE_FIELDS (type
);
1277 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1278 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1279 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1285 fld
= DECL_CHAIN (fld
);
1286 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1287 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld
)))
1292 if (DECL_CHAIN (fld
))
1298 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1299 return the rhs of its defining statement. Otherwise return RHS as it
/* Follow chains of single-rhs copies; default definitions have no defining
   statement to follow, so the walk stops there.  */
1303 get_ssa_def_if_simple_copy (tree rhs
)
1305 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1307 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1309 if (gimple_assign_single_p (def_stmt
))
1310 rhs
= gimple_assign_rhs1 (def_stmt
);
1317 /* Simple linked list, describing known contents of an aggregate before
/* Entries are kept sorted by increasing OFFSET; see the insertion walk in
   determine_known_aggregate_parts below.  */
1320 struct ipa_known_agg_contents_list
1322 /* Offset and size of the described part of the aggregate. */
1323 HOST_WIDE_INT offset
, size
;
1324 /* Known constant value or NULL if the contents is known to be unknown. */
1326 /* Pointer to the next structure in the list. */
1327 struct ipa_known_agg_contents_list
*next
;
1330 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1331 in ARG is filled in with constant values. ARG can either be an aggregate
1332 expression or a pointer to an aggregate. JFUNC is the jump function into
1333 which the constants are subsequently stored. */
1336 determine_known_aggregate_parts (gimple call
, tree arg
,
1337 struct ipa_jump_func
*jfunc
)
1339 struct ipa_known_agg_contents_list
*list
= NULL
;
1340 int item_count
= 0, const_count
= 0;
1341 HOST_WIDE_INT arg_offset
, arg_size
;
1342 gimple_stmt_iterator gsi
;
1344 bool check_ref
, by_ref
;
1347 /* The function operates in three stages. First, we prepare check_ref, r,
1348 arg_base and arg_offset based on what is actually passed as an actual
/* Pointer argument: the pointed-to aggregate is what we scan.  */
1351 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1354 if (TREE_CODE (arg
) == SSA_NAME
)
/* The pointed-to type must have a constant size we can use.  */
1357 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)))))
1362 type_size
= TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)));
1363 arg_size
= tree_to_uhwi (type_size
);
1364 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
/* &decl or &component: take the underlying reference apart.  */
1366 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1368 HOST_WIDE_INT arg_max_size
;
1370 arg
= TREE_OPERAND (arg
, 0);
1371 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
/* Punt on variable-sized or varying extents.  */
1373 if (arg_max_size
== -1
1374 || arg_max_size
!= arg_size
1377 if (DECL_P (arg_base
))
1381 size
= build_int_cst (integer_type_node
, arg_size
);
1382 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
/* Aggregate passed by value.  */
1392 HOST_WIDE_INT arg_max_size
;
1394 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1398 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1400 if (arg_max_size
== -1
1401 || arg_max_size
!= arg_size
1405 ao_ref_init (&r
, arg
);
1408 /* Second stage walks back the BB, looks at individual statements and as long
1409 as it is confident of how the statements affect contents of the
1410 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1412 gsi
= gsi_for_stmt (call
);
1414 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1416 struct ipa_known_agg_contents_list
*n
, **p
;
1417 gimple stmt
= gsi_stmt (gsi
);
1418 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1419 tree lhs
, rhs
, lhs_base
;
1420 bool partial_overlap
;
/* Statements that cannot clobber the tracked region are irrelevant.  */
1422 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1424 if (!gimple_assign_single_p (stmt
))
1427 lhs
= gimple_assign_lhs (stmt
);
1428 rhs
= gimple_assign_rhs1 (stmt
);
/* Only scalar stores without bit-field accesses are tractable.  */
1429 if (!is_gimple_reg_type (TREE_TYPE (rhs
))
1430 || TREE_CODE (lhs
) == BIT_FIELD_REF
1431 || contains_bitfld_component_ref_p (lhs
))
1434 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
/* Reject varying extents and stores straddling the region's edges.  */
1436 if (lhs_max_size
== -1
1437 || lhs_max_size
!= lhs_size
1438 || (lhs_offset
< arg_offset
1439 && lhs_offset
+ lhs_size
> arg_offset
)
1440 || (lhs_offset
< arg_offset
+ arg_size
1441 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
/* For by-ref tracking the store must go directly through the same base
   pointer at offset zero; for by-value it must hit the same base decl.  */
1446 if (TREE_CODE (lhs_base
) != MEM_REF
1447 || TREE_OPERAND (lhs_base
, 0) != arg_base
1448 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1451 else if (lhs_base
!= arg_base
)
1453 if (DECL_P (lhs_base
))
/* Stores entirely outside the tracked region are ignored.  */
1459 if (lhs_offset
+ lhs_size
< arg_offset
1460 || lhs_offset
>= (arg_offset
+ arg_size
))
/* Find the sorted insertion point and detect partial overlaps with
   already-recorded (i.e. later, overriding) stores.  */
1463 partial_overlap
= false;
1465 while (*p
&& (*p
)->offset
< lhs_offset
)
1467 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1469 partial_overlap
= true;
1474 if (partial_overlap
)
1476 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1478 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1479 /* We already know this value is subsequently overwritten with
1483 /* Otherwise this is a partial overlap which we cannot
/* Record the store; non-invariant values are remembered as "unknown" so
   they still shadow earlier stores to the same slot.  */
1488 rhs
= get_ssa_def_if_simple_copy (rhs
);
1489 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1491 n
->offset
= lhs_offset
;
1492 if (is_gimple_ip_invariant (rhs
))
1498 n
->constant
= NULL_TREE
;
/* Cap the walk so pathological functions stay cheap.  */
1503 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1504 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1508 /* Third stage just goes over the list and creates an appropriate vector of
1509 ipa_agg_jf_item structures out of it, of course only if there are
1510 any known constants to begin with. */
1514 jfunc
->agg
.by_ref
= by_ref
;
1515 vec_alloc (jfunc
->agg
.items
, const_count
);
1520 struct ipa_agg_jf_item item
;
1521 item
.offset
= list
->offset
- arg_offset
;
1522 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1523 item
.value
= unshare_expr_without_location (list
->constant
);
1524 jfunc
->agg
.items
->quick_push (item
);
/* Return the declared type of the I-th formal parameter of the callee of
   edge E.  Falls back from the function type's argument list to the
   callee's DECL_ARGUMENTS; the NULL_TREE failure returns are elided in
   this excerpt.  */
1532 ipa_get_callee_param_type (struct cgraph_edge
*e
, int i
)
/* Prefer the callee decl's type when the callee is known; otherwise use
   the type recorded on the call statement itself.  */
1535 tree type
= (e
->callee
1536 ? TREE_TYPE (e
->callee
->decl
)
1537 : gimple_call_fntype (e
->call_stmt
));
1538 tree t
= TYPE_ARG_TYPES (type
);
1540 for (n
= 0; n
< i
; n
++)
1547 return TREE_VALUE (t
);
/* Fall back to walking the callee's PARM_DECL chain.  */
1550 t
= DECL_ARGUMENTS (e
->callee
->decl
);
1551 for (n
= 0; n
< i
; n
++)
1558 return TREE_TYPE (t
);
1562 /* Compute jump function for all arguments of callsite CS and insert the
1563 information in the jump_functions array in the ipa_edge_args corresponding
1564 to this callsite. */
1567 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1568 struct cgraph_edge
*cs
)
1570 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1571 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1572 gimple call
= cs
->call_stmt
;
1573 int n
, arg_num
= gimple_call_num_args (call
);
/* Nothing to do for argument-less calls or when functions were already
   computed for this edge.  */
1575 if (arg_num
== 0 || args
->jump_functions
)
1577 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1579 if (gimple_call_internal_p (call
))
1581 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
/* Build one jump function per actual argument.  */
1584 for (n
= 0; n
< arg_num
; n
++)
1586 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1587 tree arg
= gimple_call_arg (call
, n
);
1588 tree param_type
= ipa_get_callee_param_type (cs
, n
);
/* Interprocedural invariants become constant jump functions.  */
1590 if (is_gimple_ip_invariant (arg
))
1591 ipa_set_jf_constant (jfunc
, arg
, cs
);
1592 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1593 && TREE_CODE (arg
) == PARM_DECL
)
1595 int index
= ipa_get_param_decl_index (info
, arg
);
1597 gcc_assert (index
>=0);
1598 /* Aggregate passed by value, check for pass-through, otherwise we
1599 will attempt to fill in aggregate contents later in this
1601 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1603 ipa_set_jf_simple_pass_through (jfunc
, index
, false, false);
1607 else if (TREE_CODE (arg
) == SSA_NAME
)
/* A default-def SSA argument is the caller's own incoming parameter:
   a candidate for a simple pass-through jump function.  */
1609 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1611 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1615 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1617 if (param_type
&& POINTER_TYPE_P (param_type
))
1618 type_p
= !detect_type_change_ssa (arg
, TREE_TYPE (param_type
),
1622 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1623 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
,
/* Otherwise try the complex assign / ancestor-PHI pattern matchers.  */
1629 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1630 if (is_gimple_assign (stmt
))
1631 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1632 call
, stmt
, arg
, param_type
);
1633 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1634 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1635 call
, stmt
, param_type
);
1639 compute_known_type_jump_func (arg
, jfunc
, call
,
1641 && POINTER_TYPE_P (param_type
)
1642 ? TREE_TYPE (param_type
)
/* Unless the argument's aggregate contents are already covered by an
   agg-preserving pass-through/ancestor function, scan for known
   constant parts of the aggregate.  */
1645 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1646 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1647 && (jfunc
->type
!= IPA_JF_ANCESTOR
1648 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1649 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1650 || (POINTER_TYPE_P (TREE_TYPE (arg
)))))
1651 determine_known_aggregate_parts (call
, arg
, jfunc
);
1655 /* Compute jump functions for all edges - both direct and indirect - outgoing
1656 from NODE. Also count the actual arguments in the process. */
1659 ipa_compute_jump_functions (struct cgraph_node
*node
,
1660 struct param_analysis_info
*parms_ainfo
)
1662 struct cgraph_edge
*cs
;
/* Direct call edges first.  */
1664 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1666 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1668 /* We do not need to bother analyzing calls to unknown
1669 functions unless they may become known during lto/whopr. */
1670 if (!callee
->definition
&& !flag_lto
)
1672 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
/* Then every indirect call edge.  */
1675 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1676 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1679 /* If STMT looks like a statement loading a value from a member pointer formal
1680 parameter, return that parameter and store the offset of the field to
1681 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1682 might be clobbered). If USE_DELTA, then we look for a use of the delta
1683 field rather than the pfn. */
1686 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1687 HOST_WIDE_INT
*offset_p
)
1689 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1691 if (!gimple_assign_single_p (stmt
))
/* The load may be expressed either as a COMPONENT_REF of a MEM_REF, or
   as a bare MEM_REF with a byte offset; remember which field, if any.  */
1694 rhs
= gimple_assign_rhs1 (stmt
);
1695 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1697 ref_field
= TREE_OPERAND (rhs
, 1);
1698 rhs
= TREE_OPERAND (rhs
, 0);
1701 ref_field
= NULL_TREE
;
1702 if (TREE_CODE (rhs
) != MEM_REF
)
/* The base must be the address of a PARM_DECL whose type is structured
   like a C++ member pointer (pfn + delta).  */
1704 rec
= TREE_OPERAND (rhs
, 0);
1705 if (TREE_CODE (rec
) != ADDR_EXPR
)
1707 rec
= TREE_OPERAND (rec
, 0);
1708 if (TREE_CODE (rec
) != PARM_DECL
1709 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1711 ref_offset
= TREE_OPERAND (rhs
, 1);
1718 *offset_p
= int_bit_position (fld
);
/* With a COMPONENT_REF the MEM_REF offset must be zero and the field must
   match; otherwise compare the byte offset against the field position.  */
1722 if (integer_nonzerop (ref_offset
))
1724 return ref_field
== fld
? rec
: NULL_TREE
;
1727 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1731 /* Returns true iff T is an SSA_NAME defined by a statement. */
1734 ipa_is_ssa_with_stmt_def (tree t
)
1736 if (TREE_CODE (t
) == SSA_NAME
1737 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1743 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1744 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1745 indirect call graph edge. */
1747 static struct cgraph_edge
*
1748 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1750 struct cgraph_edge
*cs
;
1752 cs
= cgraph_edge (node
, stmt
);
1753 cs
->indirect_info
->param_index
= param_index
;
1754 cs
->indirect_info
->agg_contents
= 0;
1755 cs
->indirect_info
->member_ptr
= 0;
1759 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1760 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1761 intermediate information about each formal parameter. Currently it checks
1762 whether the call calls a pointer that is a formal parameter and if so, the
1763 parameter is marked with the called flag and an indirect call graph edge
1764 describing the call is created. This is very simple for ordinary pointers
1765 represented in SSA but not-so-nice when it comes to member pointers. The
1766 ugly part of this function does nothing more than trying to match the
1767 pattern of such a call. An example of such a pattern is the gimple dump
1768 below, the call is on the last line:
1771 f$__delta_5 = f.__delta;
1772 f$__pfn_24 = f.__pfn;
1776 f$__delta_5 = MEM[(struct *)&f];
1777 f$__pfn_24 = MEM[(struct *)&f + 4B];
1779 and a few lines below:
1782 D.2496_3 = (int) f$__pfn_24;
1783 D.2497_4 = D.2496_3 & 1;
1790 D.2500_7 = (unsigned int) f$__delta_5;
1791 D.2501_8 = &S + D.2500_7;
1792 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1793 D.2503_10 = *D.2502_9;
1794 D.2504_12 = f$__pfn_24 + -1;
1795 D.2505_13 = (unsigned int) D.2504_12;
1796 D.2506_14 = D.2503_10 + D.2505_13;
1797 D.2507_15 = *D.2506_14;
1798 iftmp.11_16 = (String:: *) D.2507_15;
1801 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1802 D.2500_19 = (unsigned int) f$__delta_5;
1803 D.2508_20 = &S + D.2500_19;
1804 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1806 Such patterns are results of simple calls to a member pointer:
1808 int doprinting (int (MyString::* f)(int) const)
1810 MyString S ("somestring");
1815 Moreover, the function also looks for called pointers loaded from aggregates
1816 passed by value or reference. */
1819 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1820 struct ipa_node_params
*info
,
1821 struct param_analysis_info
*parms_ainfo
,
1822 gimple call
, tree target
)
1827 tree rec
, rec2
, cond
;
1830 basic_block bb
, virt_bb
, join
;
1831 HOST_WIDE_INT offset
;
/* Simple case: calling a formal parameter directly.  */
1834 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1836 tree var
= SSA_NAME_VAR (target
);
1837 index
= ipa_get_param_decl_index (info
, var
);
1839 ipa_note_param_call (node
, index
, call
);
/* Next: calling a pointer loaded from an aggregate parameter.  */
1843 def
= SSA_NAME_DEF_STMT (target
);
1844 if (gimple_assign_single_p (def
)
1845 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1846 gimple_assign_rhs1 (def
), &index
, &offset
,
1849 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1850 if (cs
->indirect_info
->offset
!= offset
)
1851 cs
->indirect_info
->outer_type
= NULL
;
1852 cs
->indirect_info
->offset
= offset
;
1853 cs
->indirect_info
->agg_contents
= 1;
1854 cs
->indirect_info
->by_ref
= by_ref
;
1858 /* Now we need to try to match the complex pattern of calling a member
/* The called value must come from a two-way PHI producing a method
   pointer for the member-pointer diamond to match.  */
1860 if (gimple_code (def
) != GIMPLE_PHI
1861 || gimple_phi_num_args (def
) != 2
1862 || !POINTER_TYPE_P (TREE_TYPE (target
))
1863 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1866 /* First, we need to check whether one of these is a load from a member
1867 pointer that is a parameter to this function. */
1868 n1
= PHI_ARG_DEF (def
, 0);
1869 n2
= PHI_ARG_DEF (def
, 1);
1870 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1872 d1
= SSA_NAME_DEF_STMT (n1
);
1873 d2
= SSA_NAME_DEF_STMT (n2
);
/* Identify which PHI input is the plain pfn load and which comes from
   the virtual-dispatch block.  */
1875 join
= gimple_bb (def
);
1876 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1878 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1881 bb
= EDGE_PRED (join
, 0)->src
;
1882 virt_bb
= gimple_bb (d2
);
1884 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1886 bb
= EDGE_PRED (join
, 1)->src
;
1887 virt_bb
= gimple_bb (d1
);
1892 /* Second, we need to check that the basic blocks are laid out in the way
1893 corresponding to the pattern. */
1895 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1896 || single_pred (virt_bb
) != bb
1897 || single_succ (virt_bb
) != join
)
1900 /* Third, let's see that the branching is done depending on the least
1901 significant bit of the pfn. */
1903 branch
= last_stmt (bb
);
1904 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1907 if ((gimple_cond_code (branch
) != NE_EXPR
1908 && gimple_cond_code (branch
) != EQ_EXPR
)
1909 || !integer_zerop (gimple_cond_rhs (branch
)))
/* Walk back from the condition: it must test (pfn & 1), possibly through
   a conversion.  */
1912 cond
= gimple_cond_lhs (branch
);
1913 if (!ipa_is_ssa_with_stmt_def (cond
))
1916 def
= SSA_NAME_DEF_STMT (cond
);
1917 if (!is_gimple_assign (def
)
1918 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1919 || !integer_onep (gimple_assign_rhs2 (def
)))
1922 cond
= gimple_assign_rhs1 (def
);
1923 if (!ipa_is_ssa_with_stmt_def (cond
))
1926 def
= SSA_NAME_DEF_STMT (cond
);
1928 if (is_gimple_assign (def
)
1929 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1931 cond
= gimple_assign_rhs1 (def
);
1932 if (!ipa_is_ssa_with_stmt_def (cond
))
1934 def
= SSA_NAME_DEF_STMT (cond
);
/* The tested value must be loaded from the same member pointer; which
   field carries the virtual bit is target-dependent.  */
1937 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1938 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1939 == ptrmemfunc_vbit_in_delta
),
1944 index
= ipa_get_param_decl_index (info
, rec
);
1946 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1948 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1949 if (cs
->indirect_info
->offset
!= offset
)
1950 cs
->indirect_info
->outer_type
= NULL
;
1951 cs
->indirect_info
->offset
= offset
;
1952 cs
->indirect_info
->agg_contents
= 1;
1953 cs
->indirect_info
->member_ptr
= 1;
1959 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1960 object referenced in the expression is a formal parameter of the caller
1961 (described by INFO), create a call note for the statement. */
1964 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1965 struct ipa_node_params
*info
, gimple call
,
1968 struct cgraph_edge
*cs
;
1969 struct cgraph_indirect_call_info
*ii
;
1970 struct ipa_jump_func jfunc
;
1971 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1973 HOST_WIDE_INT anc_offset
;
1975 if (!flag_devirtualize
)
1978 if (TREE_CODE (obj
) != SSA_NAME
)
/* Case 1: the OBJ_TYPE_REF object is a formal parameter itself.  */
1981 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1983 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1987 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1988 gcc_assert (index
>= 0);
1989 if (detect_type_change_ssa (obj
, obj_type_ref_class (target
),
/* Case 2: the object is an ancestor (&parm->field) of a parameter.  */
1995 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1998 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
2001 index
= ipa_get_param_decl_index (info
,
2002 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
2003 gcc_assert (index
>= 0);
2004 if (detect_type_change (obj
, expr
, obj_type_ref_class (target
),
2005 call
, &jfunc
, anc_offset
))
/* Record the polymorphic indirect call on the edge.  */
2009 cs
= ipa_note_param_call (node
, index
, call
);
2010 ii
= cs
->indirect_info
;
2011 ii
->offset
= anc_offset
;
2012 ii
->otr_token
= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target
));
2013 ii
->otr_type
= obj_type_ref_class (target
);
2014 ii
->polymorphic
= 1;
2017 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2018 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2019 containing intermediate information about each formal parameter. */
2022 ipa_analyze_call_uses (struct cgraph_node
*node
,
2023 struct ipa_node_params
*info
,
2024 struct param_analysis_info
*parms_ainfo
, gimple call
)
2026 tree target
= gimple_call_fn (call
);
2027 struct cgraph_edge
*cs
;
/* Only SSA-name targets and virtual method calls are of interest.  */
2030 || (TREE_CODE (target
) != SSA_NAME
2031 && !virtual_method_call_p (target
)))
2034 /* If we previously turned the call into a direct call, there is
2035 no need to analyze. */
2036 cs
= cgraph_edge (node
, call
);
2037 if (cs
&& !cs
->indirect_unknown_callee
)
/* Dispatch to the indirect-pointer or virtual-call analyzer.  */
2039 if (TREE_CODE (target
) == SSA_NAME
)
2040 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
2041 else if (virtual_method_call_p (target
))
2042 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
2046 /* Analyze the call statement STMT with respect to formal parameters (described
2047 in INFO) of caller given by NODE. Currently it only checks whether formal
2048 parameters are called. PARMS_AINFO is a pointer to a vector containing
2049 intermediate information about each formal parameter. */
2052 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
2053 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
/* Only call statements can use a parameter as a call target.  */
2055 if (is_gimple_call (stmt
))
2056 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
2059 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2060 If OP is a parameter declaration, mark it as used in the info structure
/* DATA is the caller's ipa_node_params; unnamed gimple/tree parameters are
   required by the walker's callback signature but unused here.  */
2064 visit_ref_for_mod_analysis (gimple
, tree op
, tree
, void *data
)
2066 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
/* Strip component references down to the underlying base object.  */
2068 op
= get_base_address (op
);
2070 && TREE_CODE (op
) == PARM_DECL
)
2072 int index
= ipa_get_param_decl_index (info
, op
);
2073 gcc_assert (index
>= 0);
2074 ipa_set_param_used (info
, index
, true);
2080 /* Scan the function body of NODE and inspect the uses of formal parameters.
2081 Store the findings in various structures of the associated ipa_node_params
2082 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
2083 vector containing intermediate information about each formal parameter. */
2086 ipa_analyze_params_uses (struct cgraph_node
*node
,
2087 struct param_analysis_info
*parms_ainfo
)
2089 tree decl
= node
->decl
;
2091 struct function
*func
;
2092 gimple_stmt_iterator gsi
;
2093 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
/* Nothing to do without parameters or if this was already analyzed.  */
2096 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
2099 info
->uses_analysis_done
= 1;
/* When specialization is forbidden, conservatively mark every parameter
   used with an undescribed number of uses.  */
2100 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2102 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2104 ipa_set_param_used (info
, i
, true);
2105 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
)
;
/* Compute per-parameter use and controlled-use information.  */
2110 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2112 tree parm
= ipa_get_param (info
, i
);
2113 int controlled_uses
= 0;
2115 /* For SSA regs see if parameter is used. For non-SSA we compute
2116 the flag during modification analysis. */
2117 if (is_gimple_reg (parm
))
2119 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->decl
),
2121 if (ddef
&& !has_zero_uses (ddef
))
2123 imm_use_iterator imm_iter
;
2124 use_operand_p use_p
;
2126 ipa_set_param_used (info
, i
, true);
/* Any non-call use means the uses cannot all be described.  */
2127 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2128 if (!is_gimple_call (USE_STMT (use_p
)))
2130 controlled_uses
= IPA_UNDESCRIBED_USE
;
2137 controlled_uses
= 0;
2140 controlled_uses
= IPA_UNDESCRIBED_USE
;
2141 ipa_set_controlled_uses (info
, i
, controlled_uses
);
/* Walk every statement and PHI, recording loads/stores/address-takes of
   parameters via visit_ref_for_mod_analysis.  */
2144 func
= DECL_STRUCT_FUNCTION (decl
);
2145 FOR_EACH_BB_FN (bb
, func
)
2147 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2149 gimple stmt
= gsi_stmt (gsi
);
2151 if (is_gimple_debug (stmt
))
2154 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2155 walk_stmt_load_store_addr_ops (stmt
, info
,
2156 visit_ref_for_mod_analysis
,
2157 visit_ref_for_mod_analysis
,
2158 visit_ref_for_mod_analysis
);
2160 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2161 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2162 visit_ref_for_mod_analysis
,
2163 visit_ref_for_mod_analysis
,
2164 visit_ref_for_mod_analysis
);
2168 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
2171 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
/* Only the two bitmaps are owned by each entry; free them if allocated.  */
2175 for (i
= 0; i
< param_count
; i
++)
2177 if (parms_ainfo
[i
].parm_visited_statements
)
2178 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2179 if (parms_ainfo
[i
].pt_visited_statements
)
2180 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2184 /* Initialize the array describing properties of formal parameters
2185 of NODE, analyze their uses and compute jump functions associated
2186 with actual arguments of calls from within NODE. */
2189 ipa_analyze_node (struct cgraph_node
*node
)
2191 struct ipa_node_params
*info
;
2192 struct param_analysis_info
*parms_ainfo
;
/* Make sure the per-node and per-edge summaries exist and enter the
   function's context (matching pop_cfun is elided in this excerpt).  */
2195 ipa_check_create_node_params ();
2196 ipa_check_create_edge_args ();
2197 info
= IPA_NODE_REF (node
);
2198 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2199 ipa_initialize_node_params (node
);
/* Stack-allocate and zero the scratch per-parameter analysis records.  */
2201 param_count
= ipa_get_param_count (info
);
2202 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2203 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2205 ipa_analyze_params_uses (node
, parms_ainfo
);
2206 ipa_compute_jump_functions (node
, parms_ainfo
);
2208 free_parms_ainfo (parms_ainfo
, param_count
);
2212 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2213 attempt a type-based devirtualization. If successful, return the
2214 target function declaration, otherwise return NULL. */
2217 ipa_intraprocedural_devirtualization (gimple call
)
2219 tree binfo
, token
, fndecl
;
2220 struct ipa_jump_func jfunc
;
2221 tree otr
= gimple_call_fn (call
);
/* Try to establish the known static type of the called object.  */
2223 jfunc
.type
= IPA_JF_UNKNOWN
;
2224 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2225 call
, obj_type_ref_class (otr
));
2226 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2228 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
/* Look the virtual method up in the binfo by its vtable token.  */
2231 token
= OBJ_TYPE_REF_TOKEN (otr
);
2232 fndecl
= gimple_get_virt_method_for_binfo (tree_to_uhwi (token
),
2234 #ifdef ENABLE_CHECKING
2236 gcc_assert (possible_polymorphic_call_target_p
2237 (otr
, cgraph_get_node (fndecl
)));
2242 /* Update the jump function DST when the call graph edge corresponding to SRC is
2243 being inlined, knowing that DST is of type ancestor and src of known
2247 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2248 struct ipa_jump_func
*dst
)
2250 HOST_WIDE_INT combined_offset
;
/* If the ancestor step does not preserve the dynamic type, the known-type
   information cannot be carried through.  */
2253 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2255 dst
->type
= IPA_JF_UNKNOWN
;
/* Compose the two offsets and convert DST into a known-type function of
   the ancestor's type at the combined offset.  */
2259 combined_offset
= ipa_get_jf_known_type_offset (src
)
2260 + ipa_get_jf_ancestor_offset (dst
);
2261 combined_type
= ipa_get_jf_ancestor_type (dst
);
2263 ipa_set_jf_known_type (dst
, combined_offset
,
2264 ipa_get_jf_known_type_base_type (src
),
2268 /* Update the jump functions associated with call graph edge E when the call
2269 graph edge CS is being inlined, assuming that E->caller is already (possibly
2270 indirectly) inlined into CS->callee and that E has not been inlined. */
2273 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2274 struct cgraph_edge
*e
)
2276 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2277 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2278 int count
= ipa_get_cs_argument_count (args
);
/* Compose each of E's jump functions (DST) with the corresponding one on
   the inlined edge CS (SRC).  */
2281 for (i
= 0; i
< count
; i
++)
2283 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
2285 if (dst
->type
== IPA_JF_ANCESTOR
)
2287 struct ipa_jump_func
*src
;
2288 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2290 /* Variable number of arguments can cause havoc if we try to access
2291 one that does not exist in the inlined edge. So make sure we
2293 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2295 dst
->type
= IPA_JF_UNKNOWN
;
2299 src
= ipa_get_ith_jump_func (top
, dst_fid
);
/* Inherit SRC's aggregate items, rebasing their offsets by the
   ancestor offset, when the aggregate is known to be preserved.  */
2302 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2304 struct ipa_agg_jf_item
*item
;
2307 /* Currently we do not produce clobber aggregate jump functions,
2308 replace with merging when we do. */
2309 gcc_assert (!dst
->agg
.items
);
2311 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2312 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2313 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2314 item
->offset
-= dst
->value
.ancestor
.offset
;
/* ancestor o known_type, ancestor o pass_through and
   ancestor o ancestor each compose; anything else degrades to
   unknown.  The *_preserved flags can only weaken (&=).  */
2317 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2318 combine_known_type_and_ancestor_jfs (src
, dst
);
2319 else if (src
->type
== IPA_JF_PASS_THROUGH
2320 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2322 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2323 dst
->value
.ancestor
.agg_preserved
&=
2324 src
->value
.pass_through
.agg_preserved
;
2325 dst
->value
.ancestor
.type_preserved
&=
2326 src
->value
.pass_through
.type_preserved
;
2328 else if (src
->type
== IPA_JF_ANCESTOR
)
2330 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2331 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2332 dst
->value
.ancestor
.agg_preserved
&=
2333 src
->value
.ancestor
.agg_preserved
;
2334 dst
->value
.ancestor
.type_preserved
&=
2335 src
->value
.ancestor
.type_preserved
;
2338 dst
->type
= IPA_JF_UNKNOWN
;
2340 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2342 struct ipa_jump_func
*src
;
2343 /* We must check range due to calls with variable number of arguments
2344 and we cannot combine jump functions with operations. */
2345 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2346 && (dst
->value
.pass_through
.formal_id
2347 < ipa_get_cs_argument_count (top
)))
2349 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2350 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2351 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
/* Dispatch on SRC's kind: a simple pass-through of X is simply X.  */
2355 case IPA_JF_UNKNOWN
:
2356 dst
->type
= IPA_JF_UNKNOWN
;
2358 case IPA_JF_KNOWN_TYPE
:
2359 ipa_set_jf_known_type (dst
,
2360 ipa_get_jf_known_type_offset (src
),
2361 ipa_get_jf_known_type_base_type (src
),
2362 ipa_get_jf_known_type_base_type (src
));
2365 ipa_set_jf_cst_copy (dst
, src
);
2368 case IPA_JF_PASS_THROUGH
:
2370 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2371 enum tree_code operation
;
2372 operation
= ipa_get_jf_pass_through_operation (src
);
2374 if (operation
== NOP_EXPR
)
2378 && ipa_get_jf_pass_through_agg_preserved (src
);
2379 type_p
= ipa_get_jf_pass_through_type_preserved (src
)
2380 && ipa_get_jf_pass_through_type_preserved (dst
);
2381 ipa_set_jf_simple_pass_through (dst
, formal_id
,
2386 tree operand
= ipa_get_jf_pass_through_operand (src
);
2387 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2392 case IPA_JF_ANCESTOR
:
2396 && ipa_get_jf_ancestor_agg_preserved (src
);
2397 type_p
= ipa_get_jf_ancestor_type_preserved (src
)
2398 && ipa_get_jf_pass_through_type_preserved (dst
);
2399 ipa_set_ancestor_jf (dst
,
2400 ipa_get_jf_ancestor_offset (src
),
2401 ipa_get_jf_ancestor_type (src
),
2402 ipa_get_jf_ancestor_formal_id (src
),
/* Copy SRC's aggregate description when the pass-through is known
   to preserve the aggregate (or it is tracked by value).  */
2411 && (dst_agg_p
|| !src
->agg
.by_ref
))
2413 /* Currently we do not produce clobber aggregate jump
2414 functions, replace with merging when we do. */
2415 gcc_assert (!dst
->agg
.items
);
2417 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2418 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2422 dst
->type
= IPA_JF_UNKNOWN
;
2427 /* If TARGET is an addr_expr of a function declaration, make it the destination
2428 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2430 struct cgraph_edge
*
2431 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2433 struct cgraph_node
*callee
;
2434 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2435 bool unreachable
= false;
2437 if (TREE_CODE (target
) == ADDR_EXPR
)
2438 target
= TREE_OPERAND (target
, 0);
2439 if (TREE_CODE (target
) != FUNCTION_DECL
)
2441 target
= canonicalize_constructor_val (target
, NULL
);
2442 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2444 if (ie
->indirect_info
->member_ptr
)
2445 /* Member pointer call that goes through a VMT lookup. */
2449 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2450 " in %s/%i, making it unreachable.\n",
2451 ie
->caller
->name (), ie
->caller
->order
);
2452 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2453 callee
= cgraph_get_create_node (target
);
2457 callee
= cgraph_get_node (target
);
2460 callee
= cgraph_get_node (target
);
2462 /* Because may-edges are not explicitely represented and vtable may be external,
2463 we may create the first reference to the object in the unit. */
2464 if (!callee
|| callee
->global
.inlined_to
)
2467 /* We are better to ensure we can refer to it.
2468 In the case of static functions we are out of luck, since we already
2469 removed its body. In the case of public functions we may or may
2470 not introduce the reference. */
2471 if (!canonicalize_constructor_val (target
, NULL
)
2472 || !TREE_PUBLIC (target
))
2475 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2476 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2477 xstrdup (ie
->caller
->name ()),
2479 xstrdup (ie
->callee
->name ()),
2483 callee
= cgraph_get_create_node (target
);
2485 ipa_check_create_node_params ();
2487 /* We can not make edges to inline clones. It is bug that someone removed
2488 the cgraph node too early. */
2489 gcc_assert (!callee
->global
.inlined_to
);
2491 if (dump_file
&& !unreachable
)
2493 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2494 "(%s/%i -> %s/%i), for stmt ",
2495 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2496 xstrdup (ie
->caller
->name ()),
2498 xstrdup (callee
->name ()),
2501 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2503 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2505 ie
= cgraph_make_edge_direct (ie
, callee
);
2506 es
= inline_edge_summary (ie
);
2507 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2508 - eni_size_weights
.call_cost
);
2509 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2510 - eni_time_weights
.call_cost
);
2515 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2516 return NULL if there is not any. BY_REF specifies whether the value has to
2517 be passed by reference or by value. */
2520 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2521 HOST_WIDE_INT offset
, bool by_ref
)
2523 struct ipa_agg_jf_item
*item
;
2526 if (by_ref
!= agg
->by_ref
)
2529 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2530 if (item
->offset
== offset
)
2532 /* Currently we do not have clobber values, return NULL for them once
2534 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2540 /* Remove a reference to SYMBOL from the list of references of a node given by
2541 reference description RDESC. Return true if the reference has been
2542 successfully found and removed. */
2545 remove_described_reference (symtab_node
*symbol
, struct ipa_cst_ref_desc
*rdesc
)
2547 struct ipa_ref
*to_del
;
2548 struct cgraph_edge
*origin
;
2553 to_del
= ipa_find_reference (origin
->caller
, symbol
,
2554 origin
->call_stmt
, origin
->lto_stmt_uid
);
2558 ipa_remove_reference (to_del
);
2560 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2561 xstrdup (origin
->caller
->name ()),
2562 origin
->caller
->order
, xstrdup (symbol
->name ()));
2566 /* If JFUNC has a reference description with refcount different from
2567 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2568 NULL. JFUNC must be a constant jump function. */
2570 static struct ipa_cst_ref_desc
*
2571 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2573 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2574 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2580 /* If the value of constant jump function JFUNC is an address of a function
2581 declaration, return the associated call graph node. Otherwise return
2584 static cgraph_node
*
2585 cgraph_node_for_jfunc (struct ipa_jump_func
*jfunc
)
2587 gcc_checking_assert (jfunc
->type
== IPA_JF_CONST
);
2588 tree cst
= ipa_get_jf_constant (jfunc
);
2589 if (TREE_CODE (cst
) != ADDR_EXPR
2590 || TREE_CODE (TREE_OPERAND (cst
, 0)) != FUNCTION_DECL
)
2593 return cgraph_get_node (TREE_OPERAND (cst
, 0));
2597 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2598 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2599 the edge specified in the rdesc. Return false if either the symbol or the
2600 reference could not be found, otherwise return true. */
2603 try_decrement_rdesc_refcount (struct ipa_jump_func
*jfunc
)
2605 struct ipa_cst_ref_desc
*rdesc
;
2606 if (jfunc
->type
== IPA_JF_CONST
2607 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2608 && --rdesc
->refcount
== 0)
2610 symtab_node
*symbol
= cgraph_node_for_jfunc (jfunc
);
2614 return remove_described_reference (symbol
, rdesc
);
2619 /* Try to find a destination for indirect edge IE that corresponds to a simple
2620 call or a call of a member function pointer and where the destination is a
2621 pointer formal parameter described by jump function JFUNC. If it can be
2622 determined, return the newly direct edge, otherwise return NULL.
2623 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2625 static struct cgraph_edge
*
2626 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2627 struct ipa_jump_func
*jfunc
,
2628 struct ipa_node_params
*new_root_info
)
2630 struct cgraph_edge
*cs
;
2632 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2634 if (ie
->indirect_info
->agg_contents
)
2635 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2636 ie
->indirect_info
->offset
,
2637 ie
->indirect_info
->by_ref
);
2639 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2642 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2644 if (cs
&& !agg_contents
)
2647 gcc_checking_assert (cs
->callee
2649 || jfunc
->type
!= IPA_JF_CONST
2650 || !cgraph_node_for_jfunc (jfunc
)
2651 || cs
->callee
== cgraph_node_for_jfunc (jfunc
)));
2652 ok
= try_decrement_rdesc_refcount (jfunc
);
2653 gcc_checking_assert (ok
);
2659 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2660 call based on a formal parameter which is described by jump function JFUNC
2661 and if it can be determined, make it direct and return the direct edge.
2662 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2665 static struct cgraph_edge
*
2666 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2667 struct ipa_jump_func
*jfunc
,
2668 struct ipa_node_params
*new_root_info
)
2672 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2677 if (TREE_CODE (binfo
) != TREE_BINFO
)
2679 binfo
= gimple_extract_devirt_binfo_from_cst
2680 (binfo
, ie
->indirect_info
->otr_type
);
2685 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2686 ie
->indirect_info
->otr_type
);
2688 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2695 #ifdef ENABLE_CHECKING
2696 gcc_assert (possible_polymorphic_call_target_p
2697 (ie
, cgraph_get_node (target
)));
2699 return ipa_make_edge_direct_to_target (ie
, target
);
2705 /* Update the param called notes associated with NODE when CS is being inlined,
2706 assuming NODE is (potentially indirectly) inlined into CS->callee.
2707 Moreover, if the callee is discovered to be constant, create a new cgraph
2708 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2709 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2712 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2713 struct cgraph_node
*node
,
2714 vec
<cgraph_edge_p
> *new_edges
)
2716 struct ipa_edge_args
*top
;
2717 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2718 struct ipa_node_params
*new_root_info
;
2721 ipa_check_create_edge_args ();
2722 top
= IPA_EDGE_REF (cs
);
2723 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2724 ? cs
->caller
->global
.inlined_to
2727 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2729 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2730 struct ipa_jump_func
*jfunc
;
2733 next_ie
= ie
->next_callee
;
2735 if (ici
->param_index
== -1)
2738 /* We must check range due to calls with variable number of arguments: */
2739 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2741 ici
->param_index
= -1;
2745 param_index
= ici
->param_index
;
2746 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2748 if (!flag_indirect_inlining
)
2749 new_direct_edge
= NULL
;
2750 else if (ici
->polymorphic
)
2751 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2754 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2756 /* If speculation was removed, then we need to do nothing. */
2757 if (new_direct_edge
&& new_direct_edge
!= ie
)
2759 new_direct_edge
->indirect_inlining_edge
= 1;
2760 top
= IPA_EDGE_REF (cs
);
2763 else if (new_direct_edge
)
2765 new_direct_edge
->indirect_inlining_edge
= 1;
2766 if (new_direct_edge
->call_stmt
)
2767 new_direct_edge
->call_stmt_cannot_inline_p
2768 = !gimple_check_call_matching_types (
2769 new_direct_edge
->call_stmt
,
2770 new_direct_edge
->callee
->decl
, false);
2773 new_edges
->safe_push (new_direct_edge
);
2776 top
= IPA_EDGE_REF (cs
);
2778 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2779 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2781 if (ici
->agg_contents
2782 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2783 ici
->param_index
= -1;
2785 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2787 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2789 if (ici
->agg_contents
2790 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2791 ici
->param_index
= -1;
2794 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2795 if (ipa_get_jf_ancestor_offset (jfunc
))
2796 ici
->outer_type
= NULL
;
2797 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2801 /* Either we can find a destination for this edge now or never. */
2802 ici
->param_index
= -1;
2808 /* Recursively traverse subtree of NODE (including node) made of inlined
2809 cgraph_edges when CS has been inlined and invoke
2810 update_indirect_edges_after_inlining on all nodes and
2811 update_jump_functions_after_inlining on all non-inlined edges that lead out
2812 of this subtree. Newly discovered indirect edges will be added to
2813 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2817 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2818 struct cgraph_node
*node
,
2819 vec
<cgraph_edge_p
> *new_edges
)
2821 struct cgraph_edge
*e
;
2824 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2826 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2827 if (!e
->inline_failed
)
2828 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2830 update_jump_functions_after_inlining (cs
, e
);
2831 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2832 update_jump_functions_after_inlining (cs
, e
);
2837 /* Combine two controlled uses counts as done during inlining. */
2840 combine_controlled_uses_counters (int c
, int d
)
2842 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2843 return IPA_UNDESCRIBED_USE
;
2848 /* Propagate number of controlled users from CS->caleee to the new root of the
2849 tree of inlined nodes. */
2852 propagate_controlled_uses (struct cgraph_edge
*cs
)
2854 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2855 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2856 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2857 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2858 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2861 count
= MIN (ipa_get_cs_argument_count (args
),
2862 ipa_get_param_count (old_root_info
));
2863 for (i
= 0; i
< count
; i
++)
2865 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2866 struct ipa_cst_ref_desc
*rdesc
;
2868 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2871 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2872 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2873 d
= ipa_get_controlled_uses (old_root_info
, i
);
2875 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2876 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2877 c
= combine_controlled_uses_counters (c
, d
);
2878 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2879 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2881 struct cgraph_node
*n
;
2882 struct ipa_ref
*ref
;
2883 tree t
= new_root_info
->known_vals
[src_idx
];
2885 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2886 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2887 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2888 && (ref
= ipa_find_reference (new_root
,
2892 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2893 "reference from %s/%i to %s/%i.\n",
2894 xstrdup (new_root
->name ()),
2896 xstrdup (n
->name ()), n
->order
);
2897 ipa_remove_reference (ref
);
2901 else if (jf
->type
== IPA_JF_CONST
2902 && (rdesc
= jfunc_rdesc_usable (jf
)))
2904 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2905 int c
= rdesc
->refcount
;
2906 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2907 if (rdesc
->refcount
== 0)
2909 tree cst
= ipa_get_jf_constant (jf
);
2910 struct cgraph_node
*n
;
2911 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2912 && TREE_CODE (TREE_OPERAND (cst
, 0))
2914 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
2917 struct cgraph_node
*clone
;
2919 ok
= remove_described_reference (n
, rdesc
);
2920 gcc_checking_assert (ok
);
2923 while (clone
->global
.inlined_to
2924 && clone
!= rdesc
->cs
->caller
2925 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
2927 struct ipa_ref
*ref
;
2928 ref
= ipa_find_reference (clone
,
2933 fprintf (dump_file
, "ipa-prop: Removing "
2934 "cloning-created reference "
2935 "from %s/%i to %s/%i.\n",
2936 xstrdup (clone
->name ()),
2938 xstrdup (n
->name ()),
2940 ipa_remove_reference (ref
);
2942 clone
= clone
->callers
->caller
;
2949 for (i
= ipa_get_param_count (old_root_info
);
2950 i
< ipa_get_cs_argument_count (args
);
2953 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2955 if (jf
->type
== IPA_JF_CONST
)
2957 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
2959 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
2961 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
2962 ipa_set_controlled_uses (new_root_info
,
2963 jf
->value
.pass_through
.formal_id
,
2964 IPA_UNDESCRIBED_USE
);
2968 /* Update jump functions and call note functions on inlining the call site CS.
2969 CS is expected to lead to a node already cloned by
2970 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2971 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2975 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
2976 vec
<cgraph_edge_p
> *new_edges
)
2979 /* Do nothing if the preparation phase has not been carried out yet
2980 (i.e. during early inlining). */
2981 if (!ipa_node_params_vector
.exists ())
2983 gcc_assert (ipa_edge_args_vector
);
2985 propagate_controlled_uses (cs
);
2986 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
2991 /* Frees all dynamically allocated structures that the argument info points
2995 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
2997 vec_free (args
->jump_functions
);
2998 memset (args
, 0, sizeof (*args
));
3001 /* Free all ipa_edge structures. */
3004 ipa_free_all_edge_args (void)
3007 struct ipa_edge_args
*args
;
3009 if (!ipa_edge_args_vector
)
3012 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
3013 ipa_free_edge_args_substructures (args
);
3015 vec_free (ipa_edge_args_vector
);
3018 /* Frees all dynamically allocated structures that the param info points
3022 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
3024 info
->descriptors
.release ();
3025 free (info
->lattices
);
3026 /* Lattice values and their sources are deallocated with their alocation
3028 info
->known_vals
.release ();
3029 memset (info
, 0, sizeof (*info
));
3032 /* Free all ipa_node_params structures. */
3035 ipa_free_all_node_params (void)
3038 struct ipa_node_params
*info
;
3040 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
3041 ipa_free_node_params_substructures (info
);
3043 ipa_node_params_vector
.release ();
3046 /* Set the aggregate replacements of NODE to be AGGVALS. */
3049 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
3050 struct ipa_agg_replacement_value
*aggvals
)
3052 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
3053 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
3055 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
3058 /* Hook that is called by cgraph.c when an edge is removed. */
3061 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
3063 struct ipa_edge_args
*args
;
3065 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3066 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
3069 args
= IPA_EDGE_REF (cs
);
3070 if (args
->jump_functions
)
3072 struct ipa_jump_func
*jf
;
3074 FOR_EACH_VEC_ELT (*args
->jump_functions
, i
, jf
)
3076 struct ipa_cst_ref_desc
*rdesc
;
3077 try_decrement_rdesc_refcount (jf
);
3078 if (jf
->type
== IPA_JF_CONST
3079 && (rdesc
= ipa_get_jf_constant_rdesc (jf
))
3085 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
3088 /* Hook that is called by cgraph.c when a node is removed. */
3091 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3093 /* During IPA-CP updating we can be called on not-yet analyze clones. */
3094 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
3095 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
3096 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
3097 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
3100 /* Hook that is called by cgraph.c when an edge is duplicated. */
3103 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
3104 __attribute__((unused
)) void *data
)
3106 struct ipa_edge_args
*old_args
, *new_args
;
3109 ipa_check_create_edge_args ();
3111 old_args
= IPA_EDGE_REF (src
);
3112 new_args
= IPA_EDGE_REF (dst
);
3114 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
3116 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
3118 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
3119 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
3121 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
3123 if (src_jf
->type
== IPA_JF_CONST
)
3125 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
3128 dst_jf
->value
.constant
.rdesc
= NULL
;
3129 else if (src
->caller
== dst
->caller
)
3131 struct ipa_ref
*ref
;
3132 symtab_node
*n
= cgraph_node_for_jfunc (src_jf
);
3133 gcc_checking_assert (n
);
3134 ref
= ipa_find_reference (src
->caller
, n
,
3135 src
->call_stmt
, src
->lto_stmt_uid
);
3136 gcc_checking_assert (ref
);
3137 ipa_clone_ref (ref
, dst
->caller
, ref
->stmt
);
3139 gcc_checking_assert (ipa_refdesc_pool
);
3140 struct ipa_cst_ref_desc
*dst_rdesc
3141 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3142 dst_rdesc
->cs
= dst
;
3143 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3144 dst_rdesc
->next_duplicate
= NULL
;
3145 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3147 else if (src_rdesc
->cs
== src
)
3149 struct ipa_cst_ref_desc
*dst_rdesc
;
3150 gcc_checking_assert (ipa_refdesc_pool
);
3152 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
3153 dst_rdesc
->cs
= dst
;
3154 dst_rdesc
->refcount
= src_rdesc
->refcount
;
3155 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
3156 src_rdesc
->next_duplicate
= dst_rdesc
;
3157 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3161 struct ipa_cst_ref_desc
*dst_rdesc
;
3162 /* This can happen during inlining, when a JFUNC can refer to a
3163 reference taken in a function up in the tree of inline clones.
3164 We need to find the duplicate that refers to our tree of
3167 gcc_assert (dst
->caller
->global
.inlined_to
);
3168 for (dst_rdesc
= src_rdesc
->next_duplicate
;
3170 dst_rdesc
= dst_rdesc
->next_duplicate
)
3172 struct cgraph_node
*top
;
3173 top
= dst_rdesc
->cs
->caller
->global
.inlined_to
3174 ? dst_rdesc
->cs
->caller
->global
.inlined_to
3175 : dst_rdesc
->cs
->caller
;
3176 if (dst
->caller
->global
.inlined_to
== top
)
3179 gcc_assert (dst_rdesc
);
3180 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
3186 /* Hook that is called by cgraph.c when a node is duplicated. */
3189 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3190 ATTRIBUTE_UNUSED
void *data
)
3192 struct ipa_node_params
*old_info
, *new_info
;
3193 struct ipa_agg_replacement_value
*old_av
, *new_av
;
3195 ipa_check_create_node_params ();
3196 old_info
= IPA_NODE_REF (src
);
3197 new_info
= IPA_NODE_REF (dst
);
3199 new_info
->descriptors
= old_info
->descriptors
.copy ();
3200 new_info
->lattices
= NULL
;
3201 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3203 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
3204 new_info
->node_enqueued
= old_info
->node_enqueued
;
3206 old_av
= ipa_get_agg_replacements_for_node (src
);
3213 struct ipa_agg_replacement_value
*v
;
3215 v
= ggc_alloc_ipa_agg_replacement_value ();
3216 memcpy (v
, old_av
, sizeof (*v
));
3219 old_av
= old_av
->next
;
3221 ipa_set_node_agg_value_chain (dst
, new_av
);
3225 /* Analyze newly added function into callgraph. */
3228 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3230 if (cgraph_function_with_gimple_body_p (node
))
3231 ipa_analyze_node (node
);
3234 /* Register our cgraph hooks if they are not already there. */
3237 ipa_register_cgraph_hooks (void)
3239 if (!edge_removal_hook_holder
)
3240 edge_removal_hook_holder
=
3241 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3242 if (!node_removal_hook_holder
)
3243 node_removal_hook_holder
=
3244 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
3245 if (!edge_duplication_hook_holder
)
3246 edge_duplication_hook_holder
=
3247 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3248 if (!node_duplication_hook_holder
)
3249 node_duplication_hook_holder
=
3250 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3251 function_insertion_hook_holder
=
3252 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
3255 /* Unregister our cgraph hooks if they are not already there. */
3258 ipa_unregister_cgraph_hooks (void)
3260 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
3261 edge_removal_hook_holder
= NULL
;
3262 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
3263 node_removal_hook_holder
= NULL
;
3264 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3265 edge_duplication_hook_holder
= NULL
;
3266 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
3267 node_duplication_hook_holder
= NULL
;
3268 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
3269 function_insertion_hook_holder
= NULL
;
3272 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3273 longer needed after ipa-cp. */
3276 ipa_free_all_structures_after_ipa_cp (void)
3280 ipa_free_all_edge_args ();
3281 ipa_free_all_node_params ();
3282 free_alloc_pool (ipcp_sources_pool
);
3283 free_alloc_pool (ipcp_values_pool
);
3284 free_alloc_pool (ipcp_agg_lattice_pool
);
3285 ipa_unregister_cgraph_hooks ();
3286 if (ipa_refdesc_pool
)
3287 free_alloc_pool (ipa_refdesc_pool
);
3291 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3292 longer needed after indirect inlining. */
3295 ipa_free_all_structures_after_iinln (void)
3297 ipa_free_all_edge_args ();
3298 ipa_free_all_node_params ();
3299 ipa_unregister_cgraph_hooks ();
3300 if (ipcp_sources_pool
)
3301 free_alloc_pool (ipcp_sources_pool
);
3302 if (ipcp_values_pool
)
3303 free_alloc_pool (ipcp_values_pool
);
3304 if (ipcp_agg_lattice_pool
)
3305 free_alloc_pool (ipcp_agg_lattice_pool
);
3306 if (ipa_refdesc_pool
)
3307 free_alloc_pool (ipa_refdesc_pool
);
3310 /* Print ipa_tree_map data structures of all functions in the
3314 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3317 struct ipa_node_params
*info
;
3319 if (!node
->definition
)
3321 info
= IPA_NODE_REF (node
);
3322 fprintf (f
, " function %s/%i parameter descriptors:\n",
3323 node
->name (), node
->order
);
3324 count
= ipa_get_param_count (info
);
3325 for (i
= 0; i
< count
; i
++)
3329 ipa_dump_param (f
, info
, i
);
3330 if (ipa_is_param_used (info
, i
))
3331 fprintf (f
, " used");
3332 c
= ipa_get_controlled_uses (info
, i
);
3333 if (c
== IPA_UNDESCRIBED_USE
)
3334 fprintf (f
, " undescribed_use");
3336 fprintf (f
, " controlled_uses=%i", c
);
3341 /* Print ipa_tree_map data structures of all functions in the
3345 ipa_print_all_params (FILE * f
)
3347 struct cgraph_node
*node
;
3349 fprintf (f
, "\nFunction parameters:\n");
3350 FOR_EACH_FUNCTION (node
)
3351 ipa_print_node_params (f
, node
);
3354 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3357 ipa_get_vector_of_formal_parms (tree fndecl
)
3363 gcc_assert (!flag_wpa
);
3364 count
= count_formal_params (fndecl
);
3365 args
.create (count
);
3366 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3367 args
.quick_push (parm
);
3372 /* Return a heap allocated vector containing types of formal parameters of
3373 function type FNTYPE. */
3376 ipa_get_vector_of_formal_parm_types (tree fntype
)
3382 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3385 types
.create (count
);
3386 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3387 types
.quick_push (TREE_VALUE (t
));
3392 /* Modify the function declaration FNDECL and its type according to the plan in
3393 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3394 to reflect the actual parameters being modified which are determined by the
3395 base_index field. */
3398 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
)
3400 vec
<tree
> oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3401 tree orig_type
= TREE_TYPE (fndecl
);
3402 tree old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3404 /* The following test is an ugly hack, some functions simply don't have any
3405 arguments in their type. This is probably a bug but well... */
3406 bool care_for_types
= (old_arg_types
!= NULL_TREE
);
3407 bool last_parm_void
;
3411 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3413 otypes
= ipa_get_vector_of_formal_parm_types (orig_type
);
3415 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3417 gcc_assert (oparms
.length () == otypes
.length ());
3421 last_parm_void
= false;
3425 int len
= adjustments
.length ();
3426 tree
*link
= &DECL_ARGUMENTS (fndecl
);
3427 tree new_arg_types
= NULL
;
3428 for (int i
= 0; i
< len
; i
++)
3430 struct ipa_parm_adjustment
*adj
;
3433 adj
= &adjustments
[i
];
3435 if (adj
->op
== IPA_PARM_OP_NEW
)
3438 parm
= oparms
[adj
->base_index
];
3441 if (adj
->op
== IPA_PARM_OP_COPY
)
3444 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3447 link
= &DECL_CHAIN (parm
);
3449 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3455 ptype
= build_pointer_type (adj
->type
);
3459 if (is_gimple_reg_type (ptype
))
3461 unsigned malign
= GET_MODE_ALIGNMENT (TYPE_MODE (ptype
));
3462 if (TYPE_ALIGN (ptype
) < malign
)
3463 ptype
= build_aligned_type (ptype
, malign
);
3468 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3470 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3472 const char *prefix
= adj
->arg_prefix
? adj
->arg_prefix
: "SYNTH";
3473 DECL_NAME (new_parm
) = create_tmp_var_name (prefix
);
3474 DECL_ARTIFICIAL (new_parm
) = 1;
3475 DECL_ARG_TYPE (new_parm
) = ptype
;
3476 DECL_CONTEXT (new_parm
) = fndecl
;
3477 TREE_USED (new_parm
) = 1;
3478 DECL_IGNORED_P (new_parm
) = 1;
3479 layout_decl (new_parm
, 0);
3481 if (adj
->op
== IPA_PARM_OP_NEW
)
3485 adj
->new_decl
= new_parm
;
3488 link
= &DECL_CHAIN (new_parm
);
3494 tree new_reversed
= NULL
;
3497 new_reversed
= nreverse (new_arg_types
);
3501 TREE_CHAIN (new_arg_types
) = void_list_node
;
3503 new_reversed
= void_list_node
;
3507 /* Use copy_node to preserve as much as possible from original type
3508 (debug info, attribute lists etc.)
3509 Exception is METHOD_TYPEs must have THIS argument.
3510 When we are asked to remove it, we need to build new FUNCTION_TYPE
3512 tree new_type
= NULL
;
3513 if (TREE_CODE (orig_type
) != METHOD_TYPE
3514 || (adjustments
[0].op
== IPA_PARM_OP_COPY
3515 && adjustments
[0].base_index
== 0))
3517 new_type
= build_distinct_type_copy (orig_type
);
3518 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3523 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3525 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3526 DECL_VINDEX (fndecl
) = NULL_TREE
;
3529 /* When signature changes, we need to clear builtin info. */
3530 if (DECL_BUILT_IN (fndecl
))
3532 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3533 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3536 /* This is a new type, not a copy of an old type. Need to reassociate
3537 variants. We can handle everything except the main variant lazily. */
3538 tree t
= TYPE_MAIN_VARIANT (orig_type
);
3541 TYPE_MAIN_VARIANT (new_type
) = t
;
3542 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3543 TYPE_NEXT_VARIANT (t
) = new_type
;
3547 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3548 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3551 TREE_TYPE (fndecl
) = new_type
;
3552 DECL_VIRTUAL_P (fndecl
) = 0;
3557 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3558 If this is a directly recursive call, CS must be NULL. Otherwise it must
3559 contain the corresponding call graph edge. */
3562 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3563 ipa_parm_adjustment_vec adjustments
)
3565 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3567 vec
<tree
, va_gc
> **debug_args
= NULL
;
3569 gimple_stmt_iterator gsi
, prev_gsi
;
3573 len
= adjustments
.length ();
3575 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->decl
;
3576 ipa_remove_stmt_references (current_node
, stmt
);
3578 gsi
= gsi_for_stmt (stmt
);
3580 gsi_prev (&prev_gsi
);
3581 for (i
= 0; i
< len
; i
++)
3583 struct ipa_parm_adjustment
*adj
;
3585 adj
= &adjustments
[i
];
3587 if (adj
->op
== IPA_PARM_OP_COPY
)
3589 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3591 vargs
.quick_push (arg
);
3593 else if (adj
->op
!= IPA_PARM_OP_REMOVE
)
3595 tree expr
, base
, off
;
3597 unsigned int deref_align
= 0;
3598 bool deref_base
= false;
3600 /* We create a new parameter out of the value of the old one, we can
3601 do the following kind of transformations:
3603 - A scalar passed by reference is converted to a scalar passed by
3604 value. (adj->by_ref is false and the type of the original
3605 actual argument is a pointer to a scalar).
3607 - A part of an aggregate is passed instead of the whole aggregate.
3608 The part can be passed either by value or by reference, this is
3609 determined by value of adj->by_ref. Moreover, the code below
3610 handles both situations when the original aggregate is passed by
3611 value (its type is not a pointer) and when it is passed by
3612 reference (it is a pointer to an aggregate).
3614 When the new argument is passed by reference (adj->by_ref is true)
3615 it must be a part of an aggregate and therefore we form it by
3616 simply taking the address of a reference inside the original
3619 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3620 base
= gimple_call_arg (stmt
, adj
->base_index
);
3621 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3622 : EXPR_LOCATION (base
);
3624 if (TREE_CODE (base
) != ADDR_EXPR
3625 && POINTER_TYPE_P (TREE_TYPE (base
)))
3626 off
= build_int_cst (adj
->alias_ptr_type
,
3627 adj
->offset
/ BITS_PER_UNIT
);
3630 HOST_WIDE_INT base_offset
;
3634 if (TREE_CODE (base
) == ADDR_EXPR
)
3636 base
= TREE_OPERAND (base
, 0);
3642 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3643 /* Aggregate arguments can have non-invariant addresses. */
3646 base
= build_fold_addr_expr (prev_base
);
3647 off
= build_int_cst (adj
->alias_ptr_type
,
3648 adj
->offset
/ BITS_PER_UNIT
);
3650 else if (TREE_CODE (base
) == MEM_REF
)
3655 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3657 off
= build_int_cst (adj
->alias_ptr_type
,
3659 + adj
->offset
/ BITS_PER_UNIT
);
3660 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3662 base
= TREE_OPERAND (base
, 0);
3666 off
= build_int_cst (adj
->alias_ptr_type
,
3668 + adj
->offset
/ BITS_PER_UNIT
);
3669 base
= build_fold_addr_expr (base
);
3675 tree type
= adj
->type
;
3677 unsigned HOST_WIDE_INT misalign
;
3681 align
= deref_align
;
3686 get_pointer_alignment_1 (base
, &align
, &misalign
);
3687 if (TYPE_ALIGN (type
) > align
)
3688 align
= TYPE_ALIGN (type
);
3690 misalign
+= (tree_to_double_int (off
)
3691 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3693 misalign
= misalign
& (align
- 1);
3695 align
= (misalign
& -misalign
);
3696 if (align
< TYPE_ALIGN (type
))
3697 type
= build_aligned_type (type
, align
);
3698 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3702 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3703 expr
= build_fold_addr_expr (expr
);
3706 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3708 || is_gimple_reg_type (adj
->type
),
3709 NULL
, true, GSI_SAME_STMT
);
3710 vargs
.quick_push (expr
);
3712 if (adj
->op
!= IPA_PARM_OP_COPY
&& MAY_HAVE_DEBUG_STMTS
)
3715 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3718 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3719 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3721 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3723 arg
= fold_convert_loc (gimple_location (stmt
),
3724 TREE_TYPE (origin
), arg
);
3726 if (debug_args
== NULL
)
3727 debug_args
= decl_debug_args_insert (callee_decl
);
3728 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3729 if (ddecl
== origin
)
3731 ddecl
= (**debug_args
)[ix
+ 1];
3736 ddecl
= make_node (DEBUG_EXPR_DECL
);
3737 DECL_ARTIFICIAL (ddecl
) = 1;
3738 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3739 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3741 vec_safe_push (*debug_args
, origin
);
3742 vec_safe_push (*debug_args
, ddecl
);
3744 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3745 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3749 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3751 fprintf (dump_file
, "replacing stmt:");
3752 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3755 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3757 if (gimple_call_lhs (stmt
))
3758 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3760 gimple_set_block (new_stmt
, gimple_block (stmt
));
3761 if (gimple_has_location (stmt
))
3762 gimple_set_location (new_stmt
, gimple_location (stmt
));
3763 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3764 gimple_call_copy_flags (new_stmt
, stmt
);
3766 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3768 fprintf (dump_file
, "with stmt:");
3769 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3770 fprintf (dump_file
, "\n");
3772 gsi_replace (&gsi
, new_stmt
, true);
3774 cgraph_set_call_stmt (cs
, new_stmt
);
3777 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3780 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3781 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3783 update_ssa (TODO_update_ssa
);
3784 free_dominance_info (CDI_DOMINATORS
);
3787 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
3788 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
3789 specifies whether the function should care about type incompatibility the
3790 current and new expressions. If it is false, the function will leave
3791 incompatibility issues to the caller. Return true iff the expression
3795 ipa_modify_expr (tree
*expr
, bool convert
,
3796 ipa_parm_adjustment_vec adjustments
)
3798 struct ipa_parm_adjustment
*cand
3799 = ipa_get_adjustment_candidate (&expr
, &convert
, adjustments
, false);
3805 src
= build_simple_mem_ref (cand
->new_decl
);
3807 src
= cand
->new_decl
;
3809 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3811 fprintf (dump_file
, "About to replace expr ");
3812 print_generic_expr (dump_file
, *expr
, 0);
3813 fprintf (dump_file
, " with ");
3814 print_generic_expr (dump_file
, src
, 0);
3815 fprintf (dump_file
, "\n");
3818 if (convert
&& !useless_type_conversion_p (TREE_TYPE (*expr
), cand
->type
))
3820 tree vce
= build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (*expr
), src
);
3828 /* If T is an SSA_NAME, return NULL if it is not a default def or
3829 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
3830 the base variable is always returned, regardless if it is a default
3831 def. Return T if it is not an SSA_NAME. */
3834 get_ssa_base_param (tree t
, bool ignore_default_def
)
3836 if (TREE_CODE (t
) == SSA_NAME
)
3838 if (ignore_default_def
|| SSA_NAME_IS_DEFAULT_DEF (t
))
3839 return SSA_NAME_VAR (t
);
3846 /* Given an expression, return an adjustment entry specifying the
3847 transformation to be done on EXPR. If no suitable adjustment entry
3848 was found, returns NULL.
3850 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
3851 default def, otherwise bail on them.
3853 If CONVERT is non-NULL, this function will set *CONVERT if the
3854 expression provided is a component reference. ADJUSTMENTS is the
3855 adjustments vector. */
3857 ipa_parm_adjustment
*
3858 ipa_get_adjustment_candidate (tree
**expr
, bool *convert
,
3859 ipa_parm_adjustment_vec adjustments
,
3860 bool ignore_default_def
)
3862 if (TREE_CODE (**expr
) == BIT_FIELD_REF
3863 || TREE_CODE (**expr
) == IMAGPART_EXPR
3864 || TREE_CODE (**expr
) == REALPART_EXPR
)
3866 *expr
= &TREE_OPERAND (**expr
, 0);
3871 HOST_WIDE_INT offset
, size
, max_size
;
3872 tree base
= get_ref_base_and_extent (**expr
, &offset
, &size
, &max_size
);
3873 if (!base
|| size
== -1 || max_size
== -1)
3876 if (TREE_CODE (base
) == MEM_REF
)
3878 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
3879 base
= TREE_OPERAND (base
, 0);
3882 base
= get_ssa_base_param (base
, ignore_default_def
);
3883 if (!base
|| TREE_CODE (base
) != PARM_DECL
)
3886 struct ipa_parm_adjustment
*cand
= NULL
;
3887 unsigned int len
= adjustments
.length ();
3888 for (unsigned i
= 0; i
< len
; i
++)
3890 struct ipa_parm_adjustment
*adj
= &adjustments
[i
];
3892 if (adj
->base
== base
3893 && (adj
->offset
== offset
|| adj
->op
== IPA_PARM_OP_REMOVE
))
3900 if (!cand
|| cand
->op
== IPA_PARM_OP_COPY
|| cand
->op
== IPA_PARM_OP_REMOVE
)
3905 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3908 index_in_adjustments_multiple_times_p (int base_index
,
3909 ipa_parm_adjustment_vec adjustments
)
3911 int i
, len
= adjustments
.length ();
3914 for (i
= 0; i
< len
; i
++)
3916 struct ipa_parm_adjustment
*adj
;
3917 adj
= &adjustments
[i
];
3919 if (adj
->base_index
== base_index
)
3931 /* Return adjustments that should have the same effect on function parameters
3932 and call arguments as if they were first changed according to adjustments in
3933 INNER and then by adjustments in OUTER. */
3935 ipa_parm_adjustment_vec
3936 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
3937 ipa_parm_adjustment_vec outer
)
3939 int i
, outlen
= outer
.length ();
3940 int inlen
= inner
.length ();
3942 ipa_parm_adjustment_vec adjustments
, tmp
;
3945 for (i
= 0; i
< inlen
; i
++)
3947 struct ipa_parm_adjustment
*n
;
3950 if (n
->op
== IPA_PARM_OP_REMOVE
)
3954 /* FIXME: Handling of new arguments are not implemented yet. */
3955 gcc_assert (n
->op
!= IPA_PARM_OP_NEW
);
3956 tmp
.quick_push (*n
);
3960 adjustments
.create (outlen
+ removals
);
3961 for (i
= 0; i
< outlen
; i
++)
3963 struct ipa_parm_adjustment r
;
3964 struct ipa_parm_adjustment
*out
= &outer
[i
];
3965 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
3967 memset (&r
, 0, sizeof (r
));
3968 gcc_assert (in
->op
!= IPA_PARM_OP_REMOVE
);
3969 if (out
->op
== IPA_PARM_OP_REMOVE
)
3971 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
3973 r
.op
= IPA_PARM_OP_REMOVE
;
3974 adjustments
.quick_push (r
);
3980 /* FIXME: Handling of new arguments are not implemented yet. */
3981 gcc_assert (out
->op
!= IPA_PARM_OP_NEW
);
3984 r
.base_index
= in
->base_index
;
3987 /* FIXME: Create nonlocal value too. */
3989 if (in
->op
== IPA_PARM_OP_COPY
&& out
->op
== IPA_PARM_OP_COPY
)
3990 r
.op
= IPA_PARM_OP_COPY
;
3991 else if (in
->op
== IPA_PARM_OP_COPY
)
3992 r
.offset
= out
->offset
;
3993 else if (out
->op
== IPA_PARM_OP_COPY
)
3994 r
.offset
= in
->offset
;
3996 r
.offset
= in
->offset
+ out
->offset
;
3997 adjustments
.quick_push (r
);
4000 for (i
= 0; i
< inlen
; i
++)
4002 struct ipa_parm_adjustment
*n
= &inner
[i
];
4004 if (n
->op
== IPA_PARM_OP_REMOVE
)
4005 adjustments
.quick_push (*n
);
4012 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
4013 friendly way, assuming they are meant to be applied to FNDECL. */
4016 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
4019 int i
, len
= adjustments
.length ();
4021 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
4023 fprintf (file
, "IPA param adjustments: ");
4024 for (i
= 0; i
< len
; i
++)
4026 struct ipa_parm_adjustment
*adj
;
4027 adj
= &adjustments
[i
];
4030 fprintf (file
, " ");
4034 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
4035 print_generic_expr (file
, parms
[adj
->base_index
], 0);
4038 fprintf (file
, ", base: ");
4039 print_generic_expr (file
, adj
->base
, 0);
4043 fprintf (file
, ", new_decl: ");
4044 print_generic_expr (file
, adj
->new_decl
, 0);
4046 if (adj
->new_ssa_base
)
4048 fprintf (file
, ", new_ssa_base: ");
4049 print_generic_expr (file
, adj
->new_ssa_base
, 0);
4052 if (adj
->op
== IPA_PARM_OP_COPY
)
4053 fprintf (file
, ", copy_param");
4054 else if (adj
->op
== IPA_PARM_OP_REMOVE
)
4055 fprintf (file
, ", remove_param");
4057 fprintf (file
, ", offset %li", (long) adj
->offset
);
4059 fprintf (file
, ", by_ref");
4060 print_node_brief (file
, ", type: ", adj
->type
, 0);
4061 fprintf (file
, "\n");
4066 /* Dump the AV linked list. */
4069 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
4072 fprintf (f
, " Aggregate replacements:");
4073 for (; av
; av
= av
->next
)
4075 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
4076 av
->index
, av
->offset
);
4077 print_generic_expr (f
, av
->value
, 0);
4083 /* Stream out jump function JUMP_FUNC to OB. */
4086 ipa_write_jump_function (struct output_block
*ob
,
4087 struct ipa_jump_func
*jump_func
)
4089 struct ipa_agg_jf_item
*item
;
4090 struct bitpack_d bp
;
4093 streamer_write_uhwi (ob
, jump_func
->type
);
4094 switch (jump_func
->type
)
4096 case IPA_JF_UNKNOWN
:
4098 case IPA_JF_KNOWN_TYPE
:
4099 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
4100 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
4101 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
4105 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
4106 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
4108 case IPA_JF_PASS_THROUGH
:
4109 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
4110 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
4112 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4113 bp
= bitpack_create (ob
->main_stream
);
4114 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
4115 bp_pack_value (&bp
, jump_func
->value
.pass_through
.type_preserved
, 1);
4116 streamer_write_bitpack (&bp
);
4120 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
4121 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
4124 case IPA_JF_ANCESTOR
:
4125 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
4126 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
4127 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
4128 bp
= bitpack_create (ob
->main_stream
);
4129 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
4130 bp_pack_value (&bp
, jump_func
->value
.ancestor
.type_preserved
, 1);
4131 streamer_write_bitpack (&bp
);
4135 count
= vec_safe_length (jump_func
->agg
.items
);
4136 streamer_write_uhwi (ob
, count
);
4139 bp
= bitpack_create (ob
->main_stream
);
4140 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
4141 streamer_write_bitpack (&bp
);
4144 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
4146 streamer_write_uhwi (ob
, item
->offset
);
4147 stream_write_tree (ob
, item
->value
, true);
4151 /* Read in jump function JUMP_FUNC from IB. */
4154 ipa_read_jump_function (struct lto_input_block
*ib
,
4155 struct ipa_jump_func
*jump_func
,
4156 struct cgraph_edge
*cs
,
4157 struct data_in
*data_in
)
4159 enum jump_func_type jftype
;
4160 enum tree_code operation
;
4163 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
4166 case IPA_JF_UNKNOWN
:
4167 jump_func
->type
= IPA_JF_UNKNOWN
;
4169 case IPA_JF_KNOWN_TYPE
:
4171 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4172 tree base_type
= stream_read_tree (ib
, data_in
);
4173 tree component_type
= stream_read_tree (ib
, data_in
);
4175 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
4179 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
4181 case IPA_JF_PASS_THROUGH
:
4182 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
4183 if (operation
== NOP_EXPR
)
4185 int formal_id
= streamer_read_uhwi (ib
);
4186 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4187 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4188 bool type_preserved
= bp_unpack_value (&bp
, 1);
4189 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
,
4194 tree operand
= stream_read_tree (ib
, data_in
);
4195 int formal_id
= streamer_read_uhwi (ib
);
4196 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
4200 case IPA_JF_ANCESTOR
:
4202 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
4203 tree type
= stream_read_tree (ib
, data_in
);
4204 int formal_id
= streamer_read_uhwi (ib
);
4205 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4206 bool agg_preserved
= bp_unpack_value (&bp
, 1);
4207 bool type_preserved
= bp_unpack_value (&bp
, 1);
4209 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
,
4215 count
= streamer_read_uhwi (ib
);
4216 vec_alloc (jump_func
->agg
.items
, count
);
4219 struct bitpack_d bp
= streamer_read_bitpack (ib
);
4220 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
4222 for (i
= 0; i
< count
; i
++)
4224 struct ipa_agg_jf_item item
;
4225 item
.offset
= streamer_read_uhwi (ib
);
4226 item
.value
= stream_read_tree (ib
, data_in
);
4227 jump_func
->agg
.items
->quick_push (item
);
4231 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4232 relevant to indirect inlining to OB. */
4235 ipa_write_indirect_edge_info (struct output_block
*ob
,
4236 struct cgraph_edge
*cs
)
4238 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4239 struct bitpack_d bp
;
4241 streamer_write_hwi (ob
, ii
->param_index
);
4242 streamer_write_hwi (ob
, ii
->offset
);
4243 bp
= bitpack_create (ob
->main_stream
);
4244 bp_pack_value (&bp
, ii
->polymorphic
, 1);
4245 bp_pack_value (&bp
, ii
->agg_contents
, 1);
4246 bp_pack_value (&bp
, ii
->member_ptr
, 1);
4247 bp_pack_value (&bp
, ii
->by_ref
, 1);
4248 bp_pack_value (&bp
, ii
->maybe_in_construction
, 1);
4249 bp_pack_value (&bp
, ii
->maybe_derived_type
, 1);
4250 streamer_write_bitpack (&bp
);
4252 if (ii
->polymorphic
)
4254 streamer_write_hwi (ob
, ii
->otr_token
);
4255 stream_write_tree (ob
, ii
->otr_type
, true);
4256 stream_write_tree (ob
, ii
->outer_type
, true);
4260 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4261 relevant to indirect inlining from IB. */
4264 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
4265 struct data_in
*data_in ATTRIBUTE_UNUSED
,
4266 struct cgraph_edge
*cs
)
4268 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
4269 struct bitpack_d bp
;
4271 ii
->param_index
= (int) streamer_read_hwi (ib
);
4272 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4273 bp
= streamer_read_bitpack (ib
);
4274 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
4275 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
4276 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
4277 ii
->by_ref
= bp_unpack_value (&bp
, 1);
4278 ii
->maybe_in_construction
= bp_unpack_value (&bp
, 1);
4279 ii
->maybe_derived_type
= bp_unpack_value (&bp
, 1);
4280 if (ii
->polymorphic
)
4282 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
4283 ii
->otr_type
= stream_read_tree (ib
, data_in
);
4284 ii
->outer_type
= stream_read_tree (ib
, data_in
);
4288 /* Stream out NODE info to OB. */
4291 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
4294 lto_symtab_encoder_t encoder
;
4295 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4297 struct cgraph_edge
*e
;
4298 struct bitpack_d bp
;
4300 encoder
= ob
->decl_state
->symtab_node_encoder
;
4301 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4302 streamer_write_uhwi (ob
, node_ref
);
4304 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
4305 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4306 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
4307 bp
= bitpack_create (ob
->main_stream
);
4308 gcc_assert (info
->uses_analysis_done
4309 || ipa_get_param_count (info
) == 0);
4310 gcc_assert (!info
->node_enqueued
);
4311 gcc_assert (!info
->ipcp_orig_node
);
4312 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4313 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
4314 streamer_write_bitpack (&bp
);
4315 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
4316 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
4317 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4319 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4321 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4322 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4323 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4325 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4327 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4329 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
4330 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
4331 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
4332 ipa_write_indirect_edge_info (ob
, e
);
4336 /* Stream in NODE info from IB. */
4339 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4340 struct data_in
*data_in
)
4342 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4344 struct cgraph_edge
*e
;
4345 struct bitpack_d bp
;
4347 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4349 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4350 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
4352 bp
= streamer_read_bitpack (ib
);
4353 if (ipa_get_param_count (info
) != 0)
4354 info
->uses_analysis_done
= true;
4355 info
->node_enqueued
= false;
4356 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4357 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4358 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4359 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
4360 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4362 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4363 int count
= streamer_read_uhwi (ib
);
4367 vec_safe_grow_cleared (args
->jump_functions
, count
);
4369 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4370 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4373 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4375 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4376 int count
= streamer_read_uhwi (ib
);
4380 vec_safe_grow_cleared (args
->jump_functions
, count
);
4381 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4382 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4385 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4389 /* Write jump functions for nodes in SET. */
4392 ipa_prop_write_jump_functions (void)
4394 struct cgraph_node
*node
;
4395 struct output_block
*ob
;
4396 unsigned int count
= 0;
4397 lto_symtab_encoder_iterator lsei
;
4398 lto_symtab_encoder_t encoder
;
4401 if (!ipa_node_params_vector
.exists ())
4404 ob
= create_output_block (LTO_section_jump_functions
);
4405 encoder
= ob
->decl_state
->symtab_node_encoder
;
4406 ob
->cgraph_node
= NULL
;
4407 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4408 lsei_next_function_in_partition (&lsei
))
4410 node
= lsei_cgraph_node (lsei
);
4411 if (cgraph_function_with_gimple_body_p (node
)
4412 && IPA_NODE_REF (node
) != NULL
)
4416 streamer_write_uhwi (ob
, count
);
4418 /* Process all of the functions. */
4419 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4420 lsei_next_function_in_partition (&lsei
))
4422 node
= lsei_cgraph_node (lsei
);
4423 if (cgraph_function_with_gimple_body_p (node
)
4424 && IPA_NODE_REF (node
) != NULL
)
4425 ipa_write_node_info (ob
, node
);
4427 streamer_write_char_stream (ob
->main_stream
, 0);
4428 produce_asm (ob
, NULL
);
4429 destroy_output_block (ob
);
4432 /* Read section in file FILE_DATA of length LEN with data DATA. */
4435 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4438 const struct lto_function_header
*header
=
4439 (const struct lto_function_header
*) data
;
4440 const int cfg_offset
= sizeof (struct lto_function_header
);
4441 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4442 const int string_offset
= main_offset
+ header
->main_size
;
4443 struct data_in
*data_in
;
4444 struct lto_input_block ib_main
;
4448 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4452 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4453 header
->string_size
, vNULL
);
4454 count
= streamer_read_uhwi (&ib_main
);
4456 for (i
= 0; i
< count
; i
++)
4459 struct cgraph_node
*node
;
4460 lto_symtab_encoder_t encoder
;
4462 index
= streamer_read_uhwi (&ib_main
);
4463 encoder
= file_data
->symtab_node_encoder
;
4464 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4465 gcc_assert (node
->definition
);
4466 ipa_read_node_info (&ib_main
, node
, data_in
);
4468 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4470 lto_data_in_delete (data_in
);
4473 /* Read ipcp jump functions. */
4476 ipa_prop_read_jump_functions (void)
4478 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4479 struct lto_file_decl_data
*file_data
;
4482 ipa_check_create_node_params ();
4483 ipa_check_create_edge_args ();
4484 ipa_register_cgraph_hooks ();
4486 while ((file_data
= file_data_vec
[j
++]))
4489 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4492 ipa_prop_read_section (file_data
, data
, len
);
4496 /* After merging units, we can get mismatch in argument counts.
4497 Also decl merging might've rendered parameter lists obsolete.
4498 Also compute called_with_variable_arg info. */
4501 ipa_update_after_lto_read (void)
4503 ipa_check_create_node_params ();
4504 ipa_check_create_edge_args ();
4508 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4511 unsigned int count
= 0;
4512 lto_symtab_encoder_t encoder
;
4513 struct ipa_agg_replacement_value
*aggvals
, *av
;
4515 aggvals
= ipa_get_agg_replacements_for_node (node
);
4516 encoder
= ob
->decl_state
->symtab_node_encoder
;
4517 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
4518 streamer_write_uhwi (ob
, node_ref
);
4520 for (av
= aggvals
; av
; av
= av
->next
)
4522 streamer_write_uhwi (ob
, count
);
4524 for (av
= aggvals
; av
; av
= av
->next
)
4526 struct bitpack_d bp
;
4528 streamer_write_uhwi (ob
, av
->offset
);
4529 streamer_write_uhwi (ob
, av
->index
);
4530 stream_write_tree (ob
, av
->value
, true);
4532 bp
= bitpack_create (ob
->main_stream
);
4533 bp_pack_value (&bp
, av
->by_ref
, 1);
4534 streamer_write_bitpack (&bp
);
4538 /* Stream in the aggregate value replacement chain for NODE from IB. */
4541 read_agg_replacement_chain (struct lto_input_block
*ib
,
4542 struct cgraph_node
*node
,
4543 struct data_in
*data_in
)
4545 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4546 unsigned int count
, i
;
4548 count
= streamer_read_uhwi (ib
);
4549 for (i
= 0; i
<count
; i
++)
4551 struct ipa_agg_replacement_value
*av
;
4552 struct bitpack_d bp
;
4554 av
= ggc_alloc_ipa_agg_replacement_value ();
4555 av
->offset
= streamer_read_uhwi (ib
);
4556 av
->index
= streamer_read_uhwi (ib
);
4557 av
->value
= stream_read_tree (ib
, data_in
);
4558 bp
= streamer_read_bitpack (ib
);
4559 av
->by_ref
= bp_unpack_value (&bp
, 1);
4563 ipa_set_node_agg_value_chain (node
, aggvals
);
4566 /* Write all aggregate replacement for nodes in set. */
4569 ipa_prop_write_all_agg_replacement (void)
4571 struct cgraph_node
*node
;
4572 struct output_block
*ob
;
4573 unsigned int count
= 0;
4574 lto_symtab_encoder_iterator lsei
;
4575 lto_symtab_encoder_t encoder
;
4577 if (!ipa_node_agg_replacements
)
4580 ob
= create_output_block (LTO_section_ipcp_transform
);
4581 encoder
= ob
->decl_state
->symtab_node_encoder
;
4582 ob
->cgraph_node
= NULL
;
4583 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4584 lsei_next_function_in_partition (&lsei
))
4586 node
= lsei_cgraph_node (lsei
);
4587 if (cgraph_function_with_gimple_body_p (node
)
4588 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4592 streamer_write_uhwi (ob
, count
);
4594 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4595 lsei_next_function_in_partition (&lsei
))
4597 node
= lsei_cgraph_node (lsei
);
4598 if (cgraph_function_with_gimple_body_p (node
)
4599 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4600 write_agg_replacement_chain (ob
, node
);
4602 streamer_write_char_stream (ob
->main_stream
, 0);
4603 produce_asm (ob
, NULL
);
4604 destroy_output_block (ob
);
4607 /* Read replacements section in file FILE_DATA of length LEN with data
4611 read_replacements_section (struct lto_file_decl_data
*file_data
,
4615 const struct lto_function_header
*header
=
4616 (const struct lto_function_header
*) data
;
4617 const int cfg_offset
= sizeof (struct lto_function_header
);
4618 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4619 const int string_offset
= main_offset
+ header
->main_size
;
4620 struct data_in
*data_in
;
4621 struct lto_input_block ib_main
;
4625 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4628 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4629 header
->string_size
, vNULL
);
4630 count
= streamer_read_uhwi (&ib_main
);
4632 for (i
= 0; i
< count
; i
++)
4635 struct cgraph_node
*node
;
4636 lto_symtab_encoder_t encoder
;
4638 index
= streamer_read_uhwi (&ib_main
);
4639 encoder
= file_data
->symtab_node_encoder
;
4640 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4641 gcc_assert (node
->definition
);
4642 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4644 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4646 lto_data_in_delete (data_in
);
4649 /* Read IPA-CP aggregate replacements. */
4652 ipa_prop_read_all_agg_replacement (void)
4654 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4655 struct lto_file_decl_data
*file_data
;
4658 while ((file_data
= file_data_vec
[j
++]))
4661 const char *data
= lto_get_section_data (file_data
,
4662 LTO_section_ipcp_transform
,
4665 read_replacements_section (file_data
, data
, len
);
4669 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4673 adjust_agg_replacement_values (struct cgraph_node
*node
,
4674 struct ipa_agg_replacement_value
*aggval
)
4676 struct ipa_agg_replacement_value
*v
;
4677 int i
, c
= 0, d
= 0, *adj
;
4679 if (!node
->clone
.combined_args_to_skip
)
4682 for (v
= aggval
; v
; v
= v
->next
)
4684 gcc_assert (v
->index
>= 0);
4690 adj
= XALLOCAVEC (int, c
);
4691 for (i
= 0; i
< c
; i
++)
4692 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
4700 for (v
= aggval
; v
; v
= v
->next
)
4701 v
->index
= adj
[v
->index
];
4705 /* Function body transformation phase. */
4708 ipcp_transform_function (struct cgraph_node
*node
)
4710 vec
<ipa_param_descriptor
> descriptors
= vNULL
;
4711 struct param_analysis_info
*parms_ainfo
;
4712 struct ipa_agg_replacement_value
*aggval
;
4713 gimple_stmt_iterator gsi
;
4716 bool cfg_changed
= false, something_changed
= false;
4718 gcc_checking_assert (cfun
);
4719 gcc_checking_assert (current_function_decl
);
4722 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4723 node
->name (), node
->order
);
4725 aggval
= ipa_get_agg_replacements_for_node (node
);
4728 param_count
= count_formal_params (node
->decl
);
4729 if (param_count
== 0)
4731 adjust_agg_replacement_values (node
, aggval
);
4733 ipa_dump_agg_replacement_values (dump_file
, aggval
);
4734 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4735 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4736 descriptors
.safe_grow_cleared (param_count
);
4737 ipa_populate_param_decls (node
, descriptors
);
4739 FOR_EACH_BB_FN (bb
, cfun
)
4740 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4742 struct ipa_agg_replacement_value
*v
;
4743 gimple stmt
= gsi_stmt (gsi
);
4745 HOST_WIDE_INT offset
, size
;
4749 if (!gimple_assign_load_p (stmt
))
4751 rhs
= gimple_assign_rhs1 (stmt
);
4752 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
4757 while (handled_component_p (t
))
4759 /* V_C_E can do things like convert an array of integers to one
4760 bigger integer and similar things we do not handle below. */
4761 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4766 t
= TREE_OPERAND (t
, 0);
4771 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4772 rhs
, &index
, &offset
, &size
, &by_ref
))
4774 for (v
= aggval
; v
; v
= v
->next
)
4775 if (v
->index
== index
4776 && v
->offset
== offset
)
4779 || v
->by_ref
!= by_ref
4780 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v
->value
))) != size
)
4783 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
4784 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4786 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4787 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4788 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4789 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4790 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4795 fprintf (dump_file
, " const ");
4796 print_generic_expr (dump_file
, v
->value
, 0);
4797 fprintf (dump_file
, " can't be converted to type of ");
4798 print_generic_expr (dump_file
, rhs
, 0);
4799 fprintf (dump_file
, "\n");
4807 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4809 fprintf (dump_file
, "Modifying stmt:\n ");
4810 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4812 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4815 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4817 fprintf (dump_file
, "into:\n ");
4818 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4819 fprintf (dump_file
, "\n");
4822 something_changed
= true;
4823 if (maybe_clean_eh_stmt (stmt
)
4824 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
4828 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4829 free_parms_ainfo (parms_ainfo
, param_count
);
4830 descriptors
.release ();
4832 if (!something_changed
)
4834 else if (cfg_changed
)
4835 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4837 return TODO_update_ssa_only_virtuals
;