1 /* Interprocedural analyses.
2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
24 #include "langhooks.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-inline.h"
32 #include "ipa-inline.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "lto-streamer.h"
38 #include "data-streamer.h"
39 #include "tree-streamer.h"
42 /* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
45 struct param_analysis_info
47 bool parm_modified
, ref_modified
, pt_modified
;
48 bitmap parm_visited_statements
, pt_visited_statements
;
51 /* Vector where the parameter infos are actually stored. */
52 vec
<ipa_node_params_t
> ipa_node_params_vector
;
53 /* Vector of known aggregate values in cloned nodes. */
54 vec
<ipa_agg_replacement_value_p
, va_gc
> *ipa_node_agg_replacements
;
55 /* Vector where the parameter infos are actually stored. */
56 vec
<ipa_edge_args_t
, va_gc
> *ipa_edge_args_vector
;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
65 /* Description of a reference to an IPA constant. */
66 struct ipa_cst_ref_desc
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge
*cs
;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc
*next_duplicate
;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
77 /* Allocation pool for reference descriptions. */
79 static alloc_pool ipa_refdesc_pool
;
81 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
85 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node
*node
)
87 tree fs_opts
= DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node
->symbol
.decl
);
88 struct cl_optimization
*os
;
92 os
= TREE_OPTIMIZATION (fs_opts
);
93 return !os
->x_optimize
|| !os
->x_flag_ipa_cp
;
96 /* Return index of the formal whose tree is PTREE in function which corresponds
100 ipa_get_param_decl_index_1 (vec
<ipa_param_descriptor_t
> descriptors
, tree ptree
)
104 count
= descriptors
.length ();
105 for (i
= 0; i
< count
; i
++)
106 if (descriptors
[i
].decl
== ptree
)
112 /* Return index of the formal whose tree is PTREE in function which corresponds
116 ipa_get_param_decl_index (struct ipa_node_params
*info
, tree ptree
)
118 return ipa_get_param_decl_index_1 (info
->descriptors
, ptree
);
121 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
125 ipa_populate_param_decls (struct cgraph_node
*node
,
126 vec
<ipa_param_descriptor_t
> &descriptors
)
133 fndecl
= node
->symbol
.decl
;
134 gcc_assert (gimple_has_body_p (fndecl
));
135 fnargs
= DECL_ARGUMENTS (fndecl
);
137 for (parm
= fnargs
; parm
; parm
= DECL_CHAIN (parm
))
139 descriptors
[param_num
].decl
= parm
;
140 descriptors
[param_num
].move_cost
= estimate_move_cost (TREE_TYPE (parm
));
145 /* Return how many formal parameters FNDECL has. */
148 count_formal_params (tree fndecl
)
152 gcc_assert (gimple_has_body_p (fndecl
));
154 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
160 /* Return the declaration of Ith formal parameter of the function corresponding
161 to INFO. Note there is no setter function as this array is built just once
162 using ipa_initialize_node_params. */
165 ipa_dump_param (FILE *file
, struct ipa_node_params
*info
, int i
)
167 fprintf (file
, "param #%i", i
);
168 if (info
->descriptors
[i
].decl
)
171 print_generic_expr (file
, info
->descriptors
[i
].decl
, 0);
175 /* Initialize the ipa_node_params structure associated with NODE
176 to hold PARAM_COUNT parameters. */
179 ipa_alloc_node_params (struct cgraph_node
*node
, int param_count
)
181 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
183 if (!info
->descriptors
.exists () && param_count
)
184 info
->descriptors
.safe_grow_cleared (param_count
);
187 /* Initialize the ipa_node_params structure associated with NODE by counting
188 the function parameters, creating the descriptors and populating their
192 ipa_initialize_node_params (struct cgraph_node
*node
)
194 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
196 if (!info
->descriptors
.exists ())
198 ipa_alloc_node_params (node
, count_formal_params (node
->symbol
.decl
));
199 ipa_populate_param_decls (node
, info
->descriptors
);
203 /* Print the jump functions associated with call graph edge CS to file F. */
206 ipa_print_node_jump_functions_for_edge (FILE *f
, struct cgraph_edge
*cs
)
210 count
= ipa_get_cs_argument_count (IPA_EDGE_REF (cs
));
211 for (i
= 0; i
< count
; i
++)
213 struct ipa_jump_func
*jump_func
;
214 enum jump_func_type type
;
216 jump_func
= ipa_get_ith_jump_func (IPA_EDGE_REF (cs
), i
);
217 type
= jump_func
->type
;
219 fprintf (f
, " param %d: ", i
);
220 if (type
== IPA_JF_UNKNOWN
)
221 fprintf (f
, "UNKNOWN\n");
222 else if (type
== IPA_JF_KNOWN_TYPE
)
224 fprintf (f
, "KNOWN TYPE: base ");
225 print_generic_expr (f
, jump_func
->value
.known_type
.base_type
, 0);
226 fprintf (f
, ", offset "HOST_WIDE_INT_PRINT_DEC
", component ",
227 jump_func
->value
.known_type
.offset
);
228 print_generic_expr (f
, jump_func
->value
.known_type
.component_type
, 0);
231 else if (type
== IPA_JF_CONST
)
233 tree val
= jump_func
->value
.constant
.value
;
234 fprintf (f
, "CONST: ");
235 print_generic_expr (f
, val
, 0);
236 if (TREE_CODE (val
) == ADDR_EXPR
237 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONST_DECL
)
240 print_generic_expr (f
, DECL_INITIAL (TREE_OPERAND (val
, 0)),
245 else if (type
== IPA_JF_PASS_THROUGH
)
247 fprintf (f
, "PASS THROUGH: ");
248 fprintf (f
, "%d, op %s",
249 jump_func
->value
.pass_through
.formal_id
,
251 jump_func
->value
.pass_through
.operation
]);
252 if (jump_func
->value
.pass_through
.operation
!= NOP_EXPR
)
255 print_generic_expr (f
,
256 jump_func
->value
.pass_through
.operand
, 0);
258 if (jump_func
->value
.pass_through
.agg_preserved
)
259 fprintf (f
, ", agg_preserved");
260 if (jump_func
->value
.pass_through
.type_preserved
)
261 fprintf (f
, ", type_preserved");
264 else if (type
== IPA_JF_ANCESTOR
)
266 fprintf (f
, "ANCESTOR: ");
267 fprintf (f
, "%d, offset "HOST_WIDE_INT_PRINT_DEC
", ",
268 jump_func
->value
.ancestor
.formal_id
,
269 jump_func
->value
.ancestor
.offset
);
270 print_generic_expr (f
, jump_func
->value
.ancestor
.type
, 0);
271 if (jump_func
->value
.ancestor
.agg_preserved
)
272 fprintf (f
, ", agg_preserved");
273 if (jump_func
->value
.ancestor
.type_preserved
)
274 fprintf (f
, ", type_preserved");
278 if (jump_func
->agg
.items
)
280 struct ipa_agg_jf_item
*item
;
283 fprintf (f
, " Aggregate passed by %s:\n",
284 jump_func
->agg
.by_ref
? "reference" : "value");
285 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, j
, item
)
287 fprintf (f
, " offset: " HOST_WIDE_INT_PRINT_DEC
", ",
289 if (TYPE_P (item
->value
))
290 fprintf (f
, "clobber of " HOST_WIDE_INT_PRINT_DEC
" bits",
291 tree_low_cst (TYPE_SIZE (item
->value
), 1));
294 fprintf (f
, "cst: ");
295 print_generic_expr (f
, item
->value
, 0);
304 /* Print the jump functions of all arguments on all call graph edges going from
308 ipa_print_node_jump_functions (FILE *f
, struct cgraph_node
*node
)
310 struct cgraph_edge
*cs
;
312 fprintf (f
, " Jump functions of caller %s/%i:\n", cgraph_node_name (node
),
314 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
316 if (!ipa_edge_args_info_available_for_edge_p (cs
))
319 fprintf (f
, " callsite %s/%i -> %s/%i : \n",
320 xstrdup (cgraph_node_name (node
)), node
->symbol
.order
,
321 xstrdup (cgraph_node_name (cs
->callee
)),
322 cs
->callee
->symbol
.order
);
323 ipa_print_node_jump_functions_for_edge (f
, cs
);
326 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
328 struct cgraph_indirect_call_info
*ii
;
329 if (!ipa_edge_args_info_available_for_edge_p (cs
))
332 ii
= cs
->indirect_info
;
333 if (ii
->agg_contents
)
334 fprintf (f
, " indirect %s callsite, calling param %i, "
335 "offset " HOST_WIDE_INT_PRINT_DEC
", %s",
336 ii
->member_ptr
? "member ptr" : "aggregate",
337 ii
->param_index
, ii
->offset
,
338 ii
->by_ref
? "by reference" : "by_value");
340 fprintf (f
, " indirect %s callsite, calling param %i",
341 ii
->polymorphic
? "polymorphic" : "simple", ii
->param_index
);
345 fprintf (f
, ", for stmt ");
346 print_gimple_stmt (f
, cs
->call_stmt
, 0, TDF_SLIM
);
350 ipa_print_node_jump_functions_for_edge (f
, cs
);
354 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
357 ipa_print_all_jump_functions (FILE *f
)
359 struct cgraph_node
*node
;
361 fprintf (f
, "\nJump functions:\n");
362 FOR_EACH_FUNCTION (node
)
364 ipa_print_node_jump_functions (f
, node
);
368 /* Set JFUNC to be a known type jump function. */
371 ipa_set_jf_known_type (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
372 tree base_type
, tree component_type
)
374 jfunc
->type
= IPA_JF_KNOWN_TYPE
;
375 jfunc
->value
.known_type
.offset
= offset
,
376 jfunc
->value
.known_type
.base_type
= base_type
;
377 jfunc
->value
.known_type
.component_type
= component_type
;
380 /* Set JFUNC to be a copy of another jmp (to be used by jump function
381 combination code). The two functions will share their rdesc. */
384 ipa_set_jf_cst_copy (struct ipa_jump_func
*dst
,
385 struct ipa_jump_func
*src
)
388 gcc_checking_assert (src
->type
== IPA_JF_CONST
);
389 dst
->type
= IPA_JF_CONST
;
390 dst
->value
.constant
= src
->value
.constant
;
393 /* Set JFUNC to be a constant jmp function. */
396 ipa_set_jf_constant (struct ipa_jump_func
*jfunc
, tree constant
,
397 struct cgraph_edge
*cs
)
399 constant
= unshare_expr (constant
);
400 if (constant
&& EXPR_P (constant
))
401 SET_EXPR_LOCATION (constant
, UNKNOWN_LOCATION
);
402 jfunc
->type
= IPA_JF_CONST
;
403 jfunc
->value
.constant
.value
= unshare_expr_without_location (constant
);
405 if (TREE_CODE (constant
) == ADDR_EXPR
406 && TREE_CODE (TREE_OPERAND (constant
, 0)) == FUNCTION_DECL
)
408 struct ipa_cst_ref_desc
*rdesc
;
409 if (!ipa_refdesc_pool
)
410 ipa_refdesc_pool
= create_alloc_pool ("IPA-PROP ref descriptions",
411 sizeof (struct ipa_cst_ref_desc
), 32);
413 rdesc
= (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
415 rdesc
->next_duplicate
= NULL
;
417 jfunc
->value
.constant
.rdesc
= rdesc
;
420 jfunc
->value
.constant
.rdesc
= NULL
;
423 /* Set JFUNC to be a simple pass-through jump function. */
425 ipa_set_jf_simple_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
426 bool agg_preserved
, bool type_preserved
)
428 jfunc
->type
= IPA_JF_PASS_THROUGH
;
429 jfunc
->value
.pass_through
.operand
= NULL_TREE
;
430 jfunc
->value
.pass_through
.formal_id
= formal_id
;
431 jfunc
->value
.pass_through
.operation
= NOP_EXPR
;
432 jfunc
->value
.pass_through
.agg_preserved
= agg_preserved
;
433 jfunc
->value
.pass_through
.type_preserved
= type_preserved
;
436 /* Set JFUNC to be an arithmetic pass through jump function. */
439 ipa_set_jf_arith_pass_through (struct ipa_jump_func
*jfunc
, int formal_id
,
440 tree operand
, enum tree_code operation
)
442 jfunc
->type
= IPA_JF_PASS_THROUGH
;
443 jfunc
->value
.pass_through
.operand
= unshare_expr_without_location (operand
);
444 jfunc
->value
.pass_through
.formal_id
= formal_id
;
445 jfunc
->value
.pass_through
.operation
= operation
;
446 jfunc
->value
.pass_through
.agg_preserved
= false;
447 jfunc
->value
.pass_through
.type_preserved
= false;
450 /* Set JFUNC to be an ancestor jump function. */
453 ipa_set_ancestor_jf (struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
,
454 tree type
, int formal_id
, bool agg_preserved
,
457 jfunc
->type
= IPA_JF_ANCESTOR
;
458 jfunc
->value
.ancestor
.formal_id
= formal_id
;
459 jfunc
->value
.ancestor
.offset
= offset
;
460 jfunc
->value
.ancestor
.type
= type
;
461 jfunc
->value
.ancestor
.agg_preserved
= agg_preserved
;
462 jfunc
->value
.ancestor
.type_preserved
= type_preserved
;
465 /* Extract the acual BINFO being described by JFUNC which must be a known type
469 ipa_binfo_from_known_type_jfunc (struct ipa_jump_func
*jfunc
)
471 tree base_binfo
= TYPE_BINFO (jfunc
->value
.known_type
.base_type
);
474 return get_binfo_at_offset (base_binfo
,
475 jfunc
->value
.known_type
.offset
,
476 jfunc
->value
.known_type
.component_type
);
479 /* Structure to be passed in between detect_type_change and
480 check_stmt_for_type_change. */
482 struct type_change_info
484 /* Offset into the object where there is the virtual method pointer we are
486 HOST_WIDE_INT offset
;
487 /* The declaration or SSA_NAME pointer of the base that we are checking for
490 /* If we actually can tell the type that the object has changed to, it is
491 stored in this field. Otherwise it remains NULL_TREE. */
492 tree known_current_type
;
493 /* Set to true if dynamic type change has been detected. */
494 bool type_maybe_changed
;
495 /* Set to true if multiple types have been encountered. known_current_type
496 must be disregarded in that case. */
497 bool multiple_types_encountered
;
500 /* Return true if STMT can modify a virtual method table pointer.
502 This function makes special assumptions about both constructors and
503 destructors which are all the functions that are allowed to alter the VMT
504 pointers. It assumes that destructors begin with assignment into all VMT
505 pointers and that constructors essentially look in the following way:
507 1) The very first thing they do is that they call constructors of ancestor
508 sub-objects that have them.
510 2) Then VMT pointers of this and all its ancestors is set to new values
511 corresponding to the type corresponding to the constructor.
513 3) Only afterwards, other stuff such as constructor of member sub-objects
514 and the code written by the user is run. Only this may include calling
515 virtual functions, directly or indirectly.
517 There is no way to call a constructor of an ancestor sub-object in any
520 This means that we do not have to care whether constructors get the correct
521 type information because they will always change it (in fact, if we define
522 the type to be given by the VMT pointer, it is undefined).
524 The most important fact to derive from the above is that if, for some
525 statement in the section 3, we try to detect whether the dynamic type has
526 changed, we can safely ignore all calls as we examine the function body
527 backwards until we reach statements in section 2 because these calls cannot
528 be ancestor constructors or destructors (if the input is not bogus) and so
529 do not change the dynamic type (this holds true only for automatically
530 allocated objects but at the moment we devirtualize only these). We then
531 must detect that statements in section 2 change the dynamic type and can try
532 to derive the new type. That is enough and we can stop, we will never see
533 the calls into constructors of sub-objects in this code. Therefore we can
534 safely ignore all call statements that we traverse.
538 stmt_may_be_vtbl_ptr_store (gimple stmt
)
540 if (is_gimple_call (stmt
))
542 else if (is_gimple_assign (stmt
))
544 tree lhs
= gimple_assign_lhs (stmt
);
546 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs
)))
548 if (flag_strict_aliasing
549 && !POINTER_TYPE_P (TREE_TYPE (lhs
)))
552 if (TREE_CODE (lhs
) == COMPONENT_REF
553 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1)))
555 /* In the future we might want to use get_base_ref_and_offset to find
556 if there is a field corresponding to the offset and if so, proceed
557 almost like if it was a component ref. */
563 /* If STMT can be proved to be an assignment to the virtual method table
564 pointer of ANALYZED_OBJ and the type associated with the new table
565 identified, return the type. Otherwise return NULL_TREE. */
568 extr_type_from_vtbl_ptr_store (gimple stmt
, struct type_change_info
*tci
)
570 HOST_WIDE_INT offset
, size
, max_size
;
573 if (!gimple_assign_single_p (stmt
))
576 lhs
= gimple_assign_lhs (stmt
);
577 rhs
= gimple_assign_rhs1 (stmt
);
578 if (TREE_CODE (lhs
) != COMPONENT_REF
579 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs
, 1))
580 || TREE_CODE (rhs
) != ADDR_EXPR
)
582 rhs
= get_base_address (TREE_OPERAND (rhs
, 0));
584 || TREE_CODE (rhs
) != VAR_DECL
585 || !DECL_VIRTUAL_P (rhs
))
588 base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
);
589 if (offset
!= tci
->offset
590 || size
!= POINTER_SIZE
591 || max_size
!= POINTER_SIZE
)
593 if (TREE_CODE (base
) == MEM_REF
)
595 if (TREE_CODE (tci
->object
) != MEM_REF
596 || TREE_OPERAND (tci
->object
, 0) != TREE_OPERAND (base
, 0)
597 || !tree_int_cst_equal (TREE_OPERAND (tci
->object
, 1),
598 TREE_OPERAND (base
, 1)))
601 else if (tci
->object
!= base
)
604 return DECL_CONTEXT (rhs
);
607 /* Callback of walk_aliased_vdefs and a helper function for
608 detect_type_change to check whether a particular statement may modify
609 the virtual table pointer, and if possible also determine the new type of
610 the (sub-)object. It stores its result into DATA, which points to a
611 type_change_info structure. */
614 check_stmt_for_type_change (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef
, void *data
)
616 gimple stmt
= SSA_NAME_DEF_STMT (vdef
);
617 struct type_change_info
*tci
= (struct type_change_info
*) data
;
619 if (stmt_may_be_vtbl_ptr_store (stmt
))
622 type
= extr_type_from_vtbl_ptr_store (stmt
, tci
);
623 if (tci
->type_maybe_changed
624 && type
!= tci
->known_current_type
)
625 tci
->multiple_types_encountered
= true;
626 tci
->known_current_type
= type
;
627 tci
->type_maybe_changed
= true;
636 /* Like detect_type_change but with extra argument COMP_TYPE which will become
637 the component type part of new JFUNC of dynamic type change is detected and
638 the new base type is identified. */
641 detect_type_change_1 (tree arg
, tree base
, tree comp_type
, gimple call
,
642 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
644 struct type_change_info tci
;
647 gcc_checking_assert (DECL_P (arg
)
648 || TREE_CODE (arg
) == MEM_REF
649 || handled_component_p (arg
));
650 /* Const calls cannot call virtual methods through VMT and so type changes do
652 if (!flag_devirtualize
|| !gimple_vuse (call
))
655 ao_ref_init (&ao
, arg
);
658 ao
.size
= POINTER_SIZE
;
659 ao
.max_size
= ao
.size
;
662 tci
.object
= get_base_address (arg
);
663 tci
.known_current_type
= NULL_TREE
;
664 tci
.type_maybe_changed
= false;
665 tci
.multiple_types_encountered
= false;
667 walk_aliased_vdefs (&ao
, gimple_vuse (call
), check_stmt_for_type_change
,
669 if (!tci
.type_maybe_changed
)
672 if (!tci
.known_current_type
673 || tci
.multiple_types_encountered
675 jfunc
->type
= IPA_JF_UNKNOWN
;
677 ipa_set_jf_known_type (jfunc
, 0, tci
.known_current_type
, comp_type
);
682 /* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
683 looking for assignments to its virtual table pointer. If it is, return true
684 and fill in the jump function JFUNC with relevant type information or set it
685 to unknown. ARG is the object itself (not a pointer to it, unless
686 dereferenced). BASE is the base of the memory access as returned by
687 get_ref_base_and_extent, as is the offset. */
690 detect_type_change (tree arg
, tree base
, gimple call
,
691 struct ipa_jump_func
*jfunc
, HOST_WIDE_INT offset
)
693 return detect_type_change_1 (arg
, base
, TREE_TYPE (arg
), call
, jfunc
, offset
);
696 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
697 SSA name (its dereference will become the base and the offset is assumed to
701 detect_type_change_ssa (tree arg
, gimple call
, struct ipa_jump_func
*jfunc
)
705 gcc_checking_assert (TREE_CODE (arg
) == SSA_NAME
);
706 if (!flag_devirtualize
707 || !POINTER_TYPE_P (TREE_TYPE (arg
))
708 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != RECORD_TYPE
)
711 comp_type
= TREE_TYPE (TREE_TYPE (arg
));
712 arg
= build2 (MEM_REF
, ptr_type_node
, arg
,
713 build_int_cst (ptr_type_node
, 0));
715 return detect_type_change_1 (arg
, arg
, comp_type
, call
, jfunc
, 0);
718 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
719 boolean variable pointed to by DATA. */
722 mark_modified (ao_ref
*ao ATTRIBUTE_UNUSED
, tree vdef ATTRIBUTE_UNUSED
,
725 bool *b
= (bool *) data
;
730 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
731 a value known not to be modified in this function before reaching the
732 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
733 information about the parameter. */
736 parm_preserved_before_stmt_p (struct param_analysis_info
*parm_ainfo
,
737 gimple stmt
, tree parm_load
)
739 bool modified
= false;
740 bitmap
*visited_stmts
;
743 if (parm_ainfo
&& parm_ainfo
->parm_modified
)
746 gcc_checking_assert (gimple_vuse (stmt
) != NULL_TREE
);
747 ao_ref_init (&refd
, parm_load
);
748 /* We can cache visited statements only when parm_ainfo is available and when
749 we are looking at a naked load of the whole parameter. */
750 if (!parm_ainfo
|| TREE_CODE (parm_load
) != PARM_DECL
)
751 visited_stmts
= NULL
;
753 visited_stmts
= &parm_ainfo
->parm_visited_statements
;
754 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
756 if (parm_ainfo
&& modified
)
757 parm_ainfo
->parm_modified
= true;
761 /* If STMT is an assignment that loads a value from an parameter declaration,
762 return the index of the parameter in ipa_node_params which has not been
763 modified. Otherwise return -1. */
766 load_from_unmodified_param (vec
<ipa_param_descriptor_t
> descriptors
,
767 struct param_analysis_info
*parms_ainfo
,
773 if (!gimple_assign_single_p (stmt
))
776 op1
= gimple_assign_rhs1 (stmt
);
777 if (TREE_CODE (op1
) != PARM_DECL
)
780 index
= ipa_get_param_decl_index_1 (descriptors
, op1
);
782 || !parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
789 /* Return true if memory reference REF loads data that are known to be
790 unmodified in this function before reaching statement STMT. PARM_AINFO, if
791 non-NULL, is a pointer to a structure containing temporary information about
795 parm_ref_data_preserved_p (struct param_analysis_info
*parm_ainfo
,
796 gimple stmt
, tree ref
)
798 bool modified
= false;
801 gcc_checking_assert (gimple_vuse (stmt
));
802 if (parm_ainfo
&& parm_ainfo
->ref_modified
)
805 ao_ref_init (&refd
, ref
);
806 walk_aliased_vdefs (&refd
, gimple_vuse (stmt
), mark_modified
, &modified
,
808 if (parm_ainfo
&& modified
)
809 parm_ainfo
->ref_modified
= true;
813 /* Return true if the data pointed to by PARM is known to be unmodified in this
814 function before reaching call statement CALL into which it is passed.
815 PARM_AINFO is a pointer to a structure containing temporary information
819 parm_ref_data_pass_through_p (struct param_analysis_info
*parm_ainfo
,
820 gimple call
, tree parm
)
822 bool modified
= false;
825 /* It's unnecessary to calculate anything about memory contnets for a const
826 function because it is not goin to use it. But do not cache the result
827 either. Also, no such calculations for non-pointers. */
828 if (!gimple_vuse (call
)
829 || !POINTER_TYPE_P (TREE_TYPE (parm
)))
832 if (parm_ainfo
->pt_modified
)
835 ao_ref_init_from_ptr_and_size (&refd
, parm
, NULL_TREE
);
836 walk_aliased_vdefs (&refd
, gimple_vuse (call
), mark_modified
, &modified
,
837 parm_ainfo
? &parm_ainfo
->pt_visited_statements
: NULL
);
839 parm_ainfo
->pt_modified
= true;
843 /* Return true if we can prove that OP is a memory reference loading unmodified
844 data from an aggregate passed as a parameter and if the aggregate is passed
845 by reference, that the alias type of the load corresponds to the type of the
846 formal parameter (so that we can rely on this type for TBAA in callers).
847 INFO and PARMS_AINFO describe parameters of the current function (but the
848 latter can be NULL), STMT is the load statement. If function returns true,
849 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
850 within the aggregate and whether it is a load from a value passed by
851 reference respectively. */
854 ipa_load_from_parm_agg_1 (vec
<ipa_param_descriptor_t
> descriptors
,
855 struct param_analysis_info
*parms_ainfo
, gimple stmt
,
856 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
860 HOST_WIDE_INT size
, max_size
;
861 tree base
= get_ref_base_and_extent (op
, offset_p
, &size
, &max_size
);
863 if (max_size
== -1 || max_size
!= size
|| *offset_p
< 0)
868 int index
= ipa_get_param_decl_index_1 (descriptors
, base
);
870 && parm_preserved_before_stmt_p (parms_ainfo
? &parms_ainfo
[index
]
880 if (TREE_CODE (base
) != MEM_REF
881 || TREE_CODE (TREE_OPERAND (base
, 0)) != SSA_NAME
882 || !integer_zerop (TREE_OPERAND (base
, 1)))
885 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base
, 0)))
887 tree parm
= SSA_NAME_VAR (TREE_OPERAND (base
, 0));
888 index
= ipa_get_param_decl_index_1 (descriptors
, parm
);
892 /* This branch catches situations where a pointer parameter is not a
893 gimple register, for example:
895 void hip7(S*) (struct S * p)
897 void (*<T2e4>) (struct S *) D.1867;
907 gimple def
= SSA_NAME_DEF_STMT (TREE_OPERAND (base
, 0));
908 index
= load_from_unmodified_param (descriptors
, parms_ainfo
, def
);
912 && parm_ref_data_preserved_p (parms_ainfo
? &parms_ainfo
[index
] : NULL
,
922 /* Just like the previous function, just without the param_analysis_info
923 pointer, for users outside of this file. */
926 ipa_load_from_parm_agg (struct ipa_node_params
*info
, gimple stmt
,
927 tree op
, int *index_p
, HOST_WIDE_INT
*offset_p
,
930 return ipa_load_from_parm_agg_1 (info
->descriptors
, NULL
, stmt
, op
, index_p
,
934 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
935 of an assignment statement STMT, try to determine whether we are actually
936 handling any of the following cases and construct an appropriate jump
937 function into JFUNC if so:
939 1) The passed value is loaded from a formal parameter which is not a gimple
940 register (most probably because it is addressable, the value has to be
941 scalar) and we can guarantee the value has not changed. This case can
942 therefore be described by a simple pass-through jump function. For example:
951 2) The passed value can be described by a simple arithmetic pass-through
958 D.2064_4 = a.1(D) + 4;
961 This case can also occur in combination of the previous one, e.g.:
969 D.2064_4 = a.0_3 + 4;
972 3) The passed value is an address of an object within another one (which
973 also passed by reference). Such situations are described by an ancestor
974 jump function and describe situations such as:
976 B::foo() (struct B * const this)
980 D.1845_2 = &this_1(D)->D.1748;
983 INFO is the structure describing individual parameters access different
984 stages of IPA optimizations. PARMS_AINFO contains the information that is
985 only needed for intraprocedural analysis. */
988 compute_complex_assign_jump_func (struct ipa_node_params
*info
,
989 struct param_analysis_info
*parms_ainfo
,
990 struct ipa_jump_func
*jfunc
,
991 gimple call
, gimple stmt
, tree name
)
993 HOST_WIDE_INT offset
, size
, max_size
;
994 tree op1
, tc_ssa
, base
, ssa
;
997 op1
= gimple_assign_rhs1 (stmt
);
999 if (TREE_CODE (op1
) == SSA_NAME
)
1001 if (SSA_NAME_IS_DEFAULT_DEF (op1
))
1002 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (op1
));
1004 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
,
1005 SSA_NAME_DEF_STMT (op1
));
1010 index
= load_from_unmodified_param (info
->descriptors
, parms_ainfo
, stmt
);
1011 tc_ssa
= gimple_assign_lhs (stmt
);
1016 tree op2
= gimple_assign_rhs2 (stmt
);
1020 if (!is_gimple_ip_invariant (op2
)
1021 || (TREE_CODE_CLASS (gimple_expr_code (stmt
)) != tcc_comparison
1022 && !useless_type_conversion_p (TREE_TYPE (name
),
1026 ipa_set_jf_arith_pass_through (jfunc
, index
, op2
,
1027 gimple_assign_rhs_code (stmt
));
1029 else if (gimple_assign_single_p (stmt
))
1031 bool agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1033 bool type_p
= !detect_type_change_ssa (tc_ssa
, call
, jfunc
);
1034 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1035 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
1040 if (TREE_CODE (op1
) != ADDR_EXPR
)
1042 op1
= TREE_OPERAND (op1
, 0);
1043 if (TREE_CODE (TREE_TYPE (op1
)) != RECORD_TYPE
)
1045 base
= get_ref_base_and_extent (op1
, &offset
, &size
, &max_size
);
1046 if (TREE_CODE (base
) != MEM_REF
1047 /* If this is a varying address, punt. */
1049 || max_size
!= size
)
1051 offset
+= mem_ref_offset (base
).low
* BITS_PER_UNIT
;
1052 ssa
= TREE_OPERAND (base
, 0);
1053 if (TREE_CODE (ssa
) != SSA_NAME
1054 || !SSA_NAME_IS_DEFAULT_DEF (ssa
)
1058 /* Dynamic types are changed in constructors and destructors. */
1059 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (ssa
));
1062 bool type_p
= !detect_type_change (op1
, base
, call
, jfunc
, offset
);
1063 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1064 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (op1
), index
,
1065 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1066 call
, ssa
), type_p
);
1070 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1073 iftmp.1_3 = &obj_2(D)->D.1762;
1075 The base of the MEM_REF must be a default definition SSA NAME of a
1076 parameter. Return NULL_TREE if it looks otherwise. If case of success, the
1077 whole MEM_REF expression is returned and the offset calculated from any
1078 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1079 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1082 get_ancestor_addr_info (gimple assign
, tree
*obj_p
, HOST_WIDE_INT
*offset
)
1084 HOST_WIDE_INT size
, max_size
;
1085 tree expr
, parm
, obj
;
1087 if (!gimple_assign_single_p (assign
))
1089 expr
= gimple_assign_rhs1 (assign
);
1091 if (TREE_CODE (expr
) != ADDR_EXPR
)
1093 expr
= TREE_OPERAND (expr
, 0);
1095 expr
= get_ref_base_and_extent (expr
, offset
, &size
, &max_size
);
1097 if (TREE_CODE (expr
) != MEM_REF
1098 /* If this is a varying address, punt. */
1103 parm
= TREE_OPERAND (expr
, 0);
1104 if (TREE_CODE (parm
) != SSA_NAME
1105 || !SSA_NAME_IS_DEFAULT_DEF (parm
)
1106 || TREE_CODE (SSA_NAME_VAR (parm
)) != PARM_DECL
)
1109 *offset
+= mem_ref_offset (expr
).low
* BITS_PER_UNIT
;
1115 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1116 statement PHI, try to find out whether NAME is in fact a
1117 multiple-inheritance typecast from a descendant into an ancestor of a formal
1118 parameter and thus can be described by an ancestor jump function and if so,
1119 write the appropriate function into JFUNC.
1121 Essentially we want to match the following pattern:
1129 iftmp.1_3 = &obj_2(D)->D.1762;
1132 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1133 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1137 compute_complex_ancestor_jump_func (struct ipa_node_params
*info
,
1138 struct param_analysis_info
*parms_ainfo
,
1139 struct ipa_jump_func
*jfunc
,
1140 gimple call
, gimple phi
)
1142 HOST_WIDE_INT offset
;
1143 gimple assign
, cond
;
1144 basic_block phi_bb
, assign_bb
, cond_bb
;
1145 tree tmp
, parm
, expr
, obj
;
/* The PHI must have exactly two arguments, one of which is a zero
   pointer constant; TMP is the other (non-zero) argument.  */
1148 if (gimple_phi_num_args (phi
) != 2)
1151 if (integer_zerop (PHI_ARG_DEF (phi
, 1)))
1152 tmp
= PHI_ARG_DEF (phi
, 0);
1153 else if (integer_zerop (PHI_ARG_DEF (phi
, 0)))
1154 tmp
= PHI_ARG_DEF (phi
, 1);
1157 if (TREE_CODE (tmp
) != SSA_NAME
1158 || SSA_NAME_IS_DEFAULT_DEF (tmp
)
1159 || !POINTER_TYPE_P (TREE_TYPE (tmp
))
1160 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp
))) != RECORD_TYPE
)
/* TMP's defining assignment must be a &parm->field address computation
   in a block with a single predecessor (the guarding condition).  */
1163 assign
= SSA_NAME_DEF_STMT (tmp
);
1164 assign_bb
= gimple_bb (assign
);
1165 if (!single_pred_p (assign_bb
))
1167 expr
= get_ancestor_addr_info (assign
, &obj
, &offset
);
1170 parm
= TREE_OPERAND (expr
, 0);
1171 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (parm
));
1172 gcc_assert (index
>= 0);
/* The predecessor must end with "if (parm != 0)" — the NULL check of a
   multiple-inheritance pointer adjustment.  */
1174 cond_bb
= single_pred (assign_bb
);
1175 cond
= last_stmt (cond_bb
);
1177 || gimple_code (cond
) != GIMPLE_COND
1178 || gimple_cond_code (cond
) != NE_EXPR
1179 || gimple_cond_lhs (cond
) != parm
1180 || !integer_zerop (gimple_cond_rhs (cond
)))
/* Both PHI predecessors must be the assign block and the condition block.  */
1183 phi_bb
= gimple_bb (phi
);
1184 for (i
= 0; i
< 2; i
++)
1186 basic_block pred
= EDGE_PRED (phi_bb
, i
)->src
;
1187 if (pred
!= assign_bb
&& pred
!= cond_bb
)
1191 bool type_p
= !detect_type_change (obj
, expr
, call
, jfunc
, offset
);
1192 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1193 ipa_set_ancestor_jf (jfunc
, offset
, TREE_TYPE (obj
), index
,
1194 parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1195 call
, parm
), type_p
);
1198 /* Given OP which is passed as an actual argument to a called function,
1199 determine if it is possible to construct a KNOWN_TYPE jump function for it
1200 and if so, create one and store it to JFUNC. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1203 compute_known_type_jump_func (tree op
, struct ipa_jump_func
*jfunc
,
1206 HOST_WIDE_INT offset
, size
, max_size
;
/* Only &record-typed objects are interesting, and only when
   devirtualization is enabled.  */
1209 if (!flag_devirtualize
1210 || TREE_CODE (op
) != ADDR_EXPR
1211 || TREE_CODE (TREE_TYPE (TREE_TYPE (op
))) != RECORD_TYPE
)
1214 op
= TREE_OPERAND (op
, 0);
1215 base
= get_ref_base_and_extent (op
, &offset
, &size
, &max_size
);
1219 || TREE_CODE (TREE_TYPE (base
)) != RECORD_TYPE
1220 || is_global_var (base
))
/* Give up when the base has no BINFO (no vtable info) or the dynamic
   type may have changed before the call.  */
1223 if (!TYPE_BINFO (TREE_TYPE (base
))
1224 || detect_type_change (op
, base
, call
, jfunc
, offset
))
1227 ipa_set_jf_known_type (jfunc
, offset
, TREE_TYPE (base
), TREE_TYPE (op
));
1230 /* Inspect the given TYPE and return true iff it has the same structure (the
1231 same number of fields of the same types) as a C++ member pointer. If
1232 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1233 corresponding fields there. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1236 type_like_member_ptr_p (tree type
, tree
*method_ptr
, tree
*delta
)
1240 if (TREE_CODE (type
) != RECORD_TYPE
)
/* First field must be a pointer to METHOD_TYPE with a constant offset
   (the __pfn field of a C++ pointer-to-member-function).  */
1243 fld
= TYPE_FIELDS (type
);
1244 if (!fld
|| !POINTER_TYPE_P (TREE_TYPE (fld
))
1245 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld
))) != METHOD_TYPE
1246 || !host_integerp (DECL_FIELD_OFFSET (fld
), 1))
/* NOTE(review): upstream tests !INTEGRAL_TYPE_P (TREE_TYPE (fld)) for the
   second (__delta) field; the condition below reads inverted and is applied
   to FLD itself — likely extraction damage, confirm against upstream.  */
1252 fld
= DECL_CHAIN (fld
);
1253 if (!fld
|| INTEGRAL_TYPE_P (fld
)
1254 || !host_integerp (DECL_FIELD_OFFSET (fld
), 1))
/* Exactly two fields — any trailing field disqualifies the type.  */
1259 if (DECL_CHAIN (fld
))
1265 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1266 return the rhs of its defining statement. Otherwise return RHS as it
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1270 get_ssa_def_if_simple_copy (tree rhs
)
/* Follow chains of single-rhs copies until a non-SSA value or a
   default definition is reached.  */
1272 while (TREE_CODE (rhs
) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (rhs
))
1274 gimple def_stmt
= SSA_NAME_DEF_STMT (rhs
);
1276 if (gimple_assign_single_p (def_stmt
))
1277 rhs
= gimple_assign_rhs1 (def_stmt
);
1284 /* Simple linked list, describing known contents of an aggregate beforere
/* NOTE(review): extract drops lines here (e.g. the field declared under the
   comment at original line 1291, presumably "tree constant;" — it is used as
   n->constant elsewhere in this file); kept byte-identical.  */
1287 struct ipa_known_agg_contents_list
1289 /* Offset and size of the described part of the aggregate. */
1290 HOST_WIDE_INT offset
, size
;
1291 /* Known constant value or NULL if the contents is known to be unknown. */
1293 /* Pointer to the next structure in the list. */
1294 struct ipa_known_agg_contents_list
*next
;
1297 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1298 in ARG is filled in with constant values. ARG can either be an aggregate
1299 expression or a pointer to an aggregate. JFUNC is the jump function into
1300 which the constants are subsequently stored. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1303 determine_known_aggregate_parts (gimple call
, tree arg
,
1304 struct ipa_jump_func
*jfunc
)
1306 struct ipa_known_agg_contents_list
*list
= NULL
;
1307 int item_count
= 0, const_count
= 0;
1308 HOST_WIDE_INT arg_offset
, arg_size
;
1309 gimple_stmt_iterator gsi
;
1311 bool check_ref
, by_ref
;
1314 /* The function operates in three stages. First, we prepare check_ref, r,
1315 arg_base and arg_offset based on what is actually passed as an actual
/* Stage 1a: argument is a pointer to the aggregate (by reference).  */
1318 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
1321 if (TREE_CODE (arg
) == SSA_NAME
)
1324 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
))), 1))
1329 type_size
= TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg
)));
1330 arg_size
= tree_low_cst (type_size
, 1);
1331 ao_ref_init_from_ptr_and_size (&r
, arg_base
, NULL_TREE
);
1333 else if (TREE_CODE (arg
) == ADDR_EXPR
)
1335 HOST_WIDE_INT arg_max_size
;
1337 arg
= TREE_OPERAND (arg
, 0);
1338 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
/* A variable-sized or varying extent makes the walk unsafe — punt.  */
1340 if (arg_max_size
== -1
1341 || arg_max_size
!= arg_size
1344 if (DECL_P (arg_base
))
1348 size
= build_int_cst (integer_type_node
, arg_size
);
1349 ao_ref_init_from_ptr_and_size (&r
, arg_base
, size
);
/* Stage 1b: argument is the aggregate itself (passed by value).  */
1359 HOST_WIDE_INT arg_max_size
;
1361 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg
)));
1365 arg_base
= get_ref_base_and_extent (arg
, &arg_offset
, &arg_size
,
1367 if (arg_max_size
== -1
1368 || arg_max_size
!= arg_size
1372 ao_ref_init (&r
, arg
);
1375 /* Second stage walks back the BB, looks at individual statements and as long
1376 as it is confident of how the statements affect contents of the
1377 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1379 gsi
= gsi_for_stmt (call
);
1381 for (; !gsi_end_p (gsi
); gsi_prev (&gsi
))
1383 struct ipa_known_agg_contents_list
*n
, **p
;
1384 gimple stmt
= gsi_stmt (gsi
);
1385 HOST_WIDE_INT lhs_offset
, lhs_size
, lhs_max_size
;
1386 tree lhs
, rhs
, lhs_base
;
1387 bool partial_overlap
;
/* Skip statements that cannot clobber the tracked region; anything
   else must be a simple scalar store we can reason about.  */
1389 if (!stmt_may_clobber_ref_p_1 (stmt
, &r
))
1391 if (!gimple_assign_single_p (stmt
))
1394 lhs
= gimple_assign_lhs (stmt
);
1395 rhs
= gimple_assign_rhs1 (stmt
);
1396 if (!is_gimple_reg_type (rhs
)
1397 || TREE_CODE (lhs
) == BIT_FIELD_REF
1398 || contains_bitfld_component_ref_p (lhs
))
1401 lhs_base
= get_ref_base_and_extent (lhs
, &lhs_offset
, &lhs_size
,
/* Reject stores with varying extent or ones straddling either edge
   of the tracked region.  */
1403 if (lhs_max_size
== -1
1404 || lhs_max_size
!= lhs_size
1405 || (lhs_offset
< arg_offset
1406 && lhs_offset
+ lhs_size
> arg_offset
)
1407 || (lhs_offset
< arg_offset
+ arg_size
1408 && lhs_offset
+ lhs_size
> arg_offset
+ arg_size
))
/* The store must go through the very same base as the argument.  */
1413 if (TREE_CODE (lhs_base
) != MEM_REF
1414 || TREE_OPERAND (lhs_base
, 0) != arg_base
1415 || !integer_zerop (TREE_OPERAND (lhs_base
, 1)))
1418 else if (lhs_base
!= arg_base
)
1420 if (DECL_P (lhs_base
))
/* Stores entirely outside the tracked region are irrelevant.  */
1426 if (lhs_offset
+ lhs_size
< arg_offset
1427 || lhs_offset
>= (arg_offset
+ arg_size
))
/* Insert into the offset-sorted list, detecting partial overlaps
   with already-recorded entries.  */
1430 partial_overlap
= false;
1432 while (*p
&& (*p
)->offset
< lhs_offset
)
1434 if ((*p
)->offset
+ (*p
)->size
> lhs_offset
)
1436 partial_overlap
= true;
1441 if (partial_overlap
)
1443 if (*p
&& (*p
)->offset
< lhs_offset
+ lhs_size
)
1445 if ((*p
)->offset
== lhs_offset
&& (*p
)->size
== lhs_size
)
1446 /* We already know this value is subsequently overwritten with
1450 /* Otherwise this is a partial overlap which we cannot
1455 rhs
= get_ssa_def_if_simple_copy (rhs
);
1456 n
= XALLOCA (struct ipa_known_agg_contents_list
);
1458 n
->offset
= lhs_offset
;
1459 if (is_gimple_ip_invariant (rhs
))
1465 n
->constant
= NULL_TREE
;
/* Bail out once the per-parameter item budget is exhausted.  */
1470 if (const_count
== PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
)
1471 || item_count
== 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS
))
1475 /* Third stage just goes over the list and creates an appropriate vector of
1476 ipa_agg_jf_item structures out of it, of sourse only if there are
1477 any known constants to begin with. */
1481 jfunc
->agg
.by_ref
= by_ref
;
1482 vec_alloc (jfunc
->agg
.items
, const_count
);
1487 struct ipa_agg_jf_item item
;
/* Item offsets are stored relative to the start of the aggregate and
   must be byte-aligned.  */
1488 item
.offset
= list
->offset
- arg_offset
;
1489 gcc_assert ((item
.offset
% BITS_PER_UNIT
) == 0);
1490 item
.value
= unshare_expr_without_location (list
->constant
);
1491 jfunc
->agg
.items
->quick_push (item
);
1498 /* Compute jump function for all arguments of callsite CS and insert the
1499 information in the jump_functions array in the ipa_edge_args corresponding
1500 to this callsite. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1503 ipa_compute_jump_functions_for_edge (struct param_analysis_info
*parms_ainfo
,
1504 struct cgraph_edge
*cs
)
1506 struct ipa_node_params
*info
= IPA_NODE_REF (cs
->caller
);
1507 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
1508 gimple call
= cs
->call_stmt
;
1509 int n
, arg_num
= gimple_call_num_args (call
);
/* Nothing to do for argument-less calls or if jump functions were
   already computed for this edge.  */
1511 if (arg_num
== 0 || args
->jump_functions
)
1513 vec_safe_grow_cleared (args
->jump_functions
, arg_num
);
1515 if (ipa_func_spec_opts_forbid_analysis_p (cs
->caller
))
/* Classify each actual argument into one of the jump-function kinds.  */
1518 for (n
= 0; n
< arg_num
; n
++)
1520 struct ipa_jump_func
*jfunc
= ipa_get_ith_jump_func (args
, n
);
1521 tree arg
= gimple_call_arg (call
, n
);
1523 if (is_gimple_ip_invariant (arg
))
1524 ipa_set_jf_constant (jfunc
, arg
, cs
);
1525 else if (!is_gimple_reg_type (TREE_TYPE (arg
))
1526 && TREE_CODE (arg
) == PARM_DECL
)
1528 int index
= ipa_get_param_decl_index (info
, arg
);
1530 gcc_assert (index
>=0);
1531 /* Aggregate passed by value, check for pass-through, otherwise we
1532 will attempt to fill in aggregate contents later in this
1534 if (parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, arg
))
1536 ipa_set_jf_simple_pass_through (jfunc
, index
, false, false);
1540 else if (TREE_CODE (arg
) == SSA_NAME
)
/* A default-def SSA name is the incoming value of a formal parameter
   — a simple pass-through candidate.  */
1542 if (SSA_NAME_IS_DEFAULT_DEF (arg
))
1544 int index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (arg
));
1548 agg_p
= parm_ref_data_pass_through_p (&parms_ainfo
[index
],
1550 type_p
= !detect_type_change_ssa (arg
, call
, jfunc
);
1551 if (type_p
|| jfunc
->type
== IPA_JF_UNKNOWN
)
1552 ipa_set_jf_simple_pass_through (jfunc
, index
, agg_p
, type_p
);
/* Otherwise try the complex arithmetic/ancestor matchers on the
   defining statement.  */
1557 gimple stmt
= SSA_NAME_DEF_STMT (arg
);
1558 if (is_gimple_assign (stmt
))
1559 compute_complex_assign_jump_func (info
, parms_ainfo
, jfunc
,
1561 else if (gimple_code (stmt
) == GIMPLE_PHI
)
1562 compute_complex_ancestor_jump_func (info
, parms_ainfo
, jfunc
,
1567 compute_known_type_jump_func (arg
, jfunc
, call
);
/* If the jump function does not already guarantee preserved aggregate
   contents, try to record known constant parts of the aggregate.  */
1569 if ((jfunc
->type
!= IPA_JF_PASS_THROUGH
1570 || !ipa_get_jf_pass_through_agg_preserved (jfunc
))
1571 && (jfunc
->type
!= IPA_JF_ANCESTOR
1572 || !ipa_get_jf_ancestor_agg_preserved (jfunc
))
1573 && (AGGREGATE_TYPE_P (TREE_TYPE (arg
))
1574 || (POINTER_TYPE_P (TREE_TYPE (arg
)))))
1575 determine_known_aggregate_parts (call
, arg
, jfunc
);
1579 /* Compute jump functions for all edges - both direct and indirect - outgoing
1580 from NODE. Also count the actual arguments in the process. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1583 ipa_compute_jump_functions (struct cgraph_node
*node
,
1584 struct param_analysis_info
*parms_ainfo
)
1586 struct cgraph_edge
*cs
;
/* Direct call edges first; thunks/aliases are resolved to the real
   callee before deciding whether analysis is worthwhile.  */
1588 for (cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1590 struct cgraph_node
*callee
= cgraph_function_or_thunk_node (cs
->callee
,
1592 /* We do not need to bother analyzing calls to unknown
1593 functions unless they may become known during lto/whopr. */
1594 if (!callee
->symbol
.definition
&& !flag_lto
)
1596 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
/* Then all indirect call edges.  */
1599 for (cs
= node
->indirect_calls
; cs
; cs
= cs
->next_callee
)
1600 ipa_compute_jump_functions_for_edge (parms_ainfo
, cs
);
1603 /* If STMT looks like a statement loading a value from a member pointer formal
1604 parameter, return that parameter and store the offset of the field to
1605 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1606 might be clobbered). If USE_DELTA, then we look for a use of the delta
1607 field rather than the pfn. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1610 ipa_get_stmt_member_ptr_load_param (gimple stmt
, bool use_delta
,
1611 HOST_WIDE_INT
*offset_p
)
1613 tree rhs
, rec
, ref_field
, ref_offset
, fld
, ptr_field
, delta_field
;
1615 if (!gimple_assign_single_p (stmt
))
/* Accept either f.__pfn-style COMPONENT_REF loads or MEM_REF loads
   with an explicit byte offset.  */
1618 rhs
= gimple_assign_rhs1 (stmt
)ed;
1619 if (TREE_CODE (rhs
) == COMPONENT_REF
)
1621 ref_field
= TREE_OPERAND (rhs
, 1);
1622 rhs
= TREE_OPERAND (rhs
, 0);
1625 ref_field
= NULL_TREE
;
1626 if (TREE_CODE (rhs
) != MEM_REF
)
1628 rec
= TREE_OPERAND (rhs
, 0);
1629 if (TREE_CODE (rec
) != ADDR_EXPR
)
1631 rec
= TREE_OPERAND (rec
, 0);
/* The loaded object must be a PARM_DECL shaped like a C++ member
   pointer (a __pfn/__delta pair).  */
1632 if (TREE_CODE (rec
) != PARM_DECL
1633 || !type_like_member_ptr_p (TREE_TYPE (rec
), &ptr_field
, &delta_field
))
1635 ref_offset
= TREE_OPERAND (rhs
, 1);
1642 *offset_p
= int_bit_position (fld
);
1646 if (integer_nonzerop (ref_offset
))
1648 return ref_field
== fld
? rec
: NULL_TREE
;
1651 return tree_int_cst_equal (byte_position (fld
), ref_offset
) ? rec
1655 /* Returns true iff T is an SSA_NAME defined by a statement. */
/* NOTE(review): extract omits interior lines (return statements etc.);
   kept byte-identical.  */
1658 ipa_is_ssa_with_stmt_def (tree t
)
1660 if (TREE_CODE (t
) == SSA_NAME
1661 && !SSA_NAME_IS_DEFAULT_DEF (t
))
1667 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1668 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1669 indirect call graph edge. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical.  */
1671 static struct cgraph_edge
*
1672 ipa_note_param_call (struct cgraph_node
*node
, int param_index
, gimple stmt
)
1674 struct cgraph_edge
*cs
;
/* Reset all indirect-call metadata on the edge; callers fill in the
   specific fields (offset, agg_contents, ...) afterwards.  */
1676 cs
= cgraph_edge (node
, stmt
);
1677 cs
->indirect_info
->param_index
= param_index
;
1678 cs
->indirect_info
->offset
= 0;
1679 cs
->indirect_info
->polymorphic
= 0;
1680 cs
->indirect_info
->agg_contents
= 0;
1681 cs
->indirect_info
->member_ptr
= 0;
1685 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1686 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1687 intermediate information about each formal parameter. Currently it checks
1688 whether the call calls a pointer that is a formal parameter and if so, the
1689 parameter is marked with the called flag and an indirect call graph edge
1690 describing the call is created. This is very simple for ordinary pointers
1691 represented in SSA but not-so-nice when it comes to member pointers. The
1692 ugly part of this function does nothing more than trying to match the
1693 pattern of such a call. An example of such a pattern is the gimple dump
1694 below, the call is on the last line:
1697 f$__delta_5 = f.__delta;
1698 f$__pfn_24 = f.__pfn;
1702 f$__delta_5 = MEM[(struct *)&f];
1703 f$__pfn_24 = MEM[(struct *)&f + 4B];
1705 and a few lines below:
1708 D.2496_3 = (int) f$__pfn_24;
1709 D.2497_4 = D.2496_3 & 1;
1716 D.2500_7 = (unsigned int) f$__delta_5;
1717 D.2501_8 = &S + D.2500_7;
1718 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1719 D.2503_10 = *D.2502_9;
1720 D.2504_12 = f$__pfn_24 + -1;
1721 D.2505_13 = (unsigned int) D.2504_12;
1722 D.2506_14 = D.2503_10 + D.2505_13;
1723 D.2507_15 = *D.2506_14;
1724 iftmp.11_16 = (String:: *) D.2507_15;
1727 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1728 D.2500_19 = (unsigned int) f$__delta_5;
1729 D.2508_20 = &S + D.2500_19;
1730 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1732 Such patterns are results of simple calls to a member pointer:
1734 int doprinting (int (MyString::* f)(int) const)
1736 MyString S ("somestring");
1741 Moreover, the function also looks for called pointers loaded from aggregates
1742 passed by value or reference. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1745 ipa_analyze_indirect_call_uses (struct cgraph_node
*node
,
1746 struct ipa_node_params
*info
,
1747 struct param_analysis_info
*parms_ainfo
,
1748 gimple call
, tree target
)
1753 tree rec
, rec2
, cond
;
1756 basic_block bb
, virt_bb
, join
;
1757 HOST_WIDE_INT offset
;
/* Easy case: the called pointer is the incoming value of a formal
   parameter itself.  */
1760 if (SSA_NAME_IS_DEFAULT_DEF (target
))
1762 tree var
= SSA_NAME_VAR (target
);
1763 index
= ipa_get_param_decl_index (info
, var
);
1765 ipa_note_param_call (node
, index
, call
);
/* Next case: the called pointer is loaded from an aggregate that is a
   parameter (by value or by reference).  */
1769 def
= SSA_NAME_DEF_STMT (target
);
1770 if (gimple_assign_single_p (def
)
1771 && ipa_load_from_parm_agg_1 (info
->descriptors
, parms_ainfo
, def
,
1772 gimple_assign_rhs1 (def
), &index
, &offset
,
1775 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1776 cs
->indirect_info
->offset
= offset
;
1777 cs
->indirect_info
->agg_contents
= 1;
1778 cs
->indirect_info
->by_ref
= by_ref
;
1782 /* Now we need to try to match the complex pattern of calling a member
1784 if (gimple_code (def
) != GIMPLE_PHI
1785 || gimple_phi_num_args (def
) != 2
1786 || !POINTER_TYPE_P (TREE_TYPE (target
))
1787 || TREE_CODE (TREE_TYPE (TREE_TYPE (target
))) != METHOD_TYPE
)
1790 /* First, we need to check whether one of these is a load from a member
1791 pointer that is a parameter to this function. */
1792 n1
= PHI_ARG_DEF (def
, 0);
1793 n2
= PHI_ARG_DEF (def
, 1);
1794 if (!ipa_is_ssa_with_stmt_def (n1
) || !ipa_is_ssa_with_stmt_def (n2
))
1796 d1
= SSA_NAME_DEF_STMT (n1
);
1797 d2
= SSA_NAME_DEF_STMT (n2
);
1799 join
= gimple_bb (def
);
1800 if ((rec
= ipa_get_stmt_member_ptr_load_param (d1
, false, &offset
)))
1802 if (ipa_get_stmt_member_ptr_load_param (d2
, false, NULL
))
1805 bb
= EDGE_PRED (join
, 0)->src
;
1806 virt_bb
= gimple_bb (d2
);
1808 else if ((rec
= ipa_get_stmt_member_ptr_load_param (d2
, false, &offset
)))
1810 bb
= EDGE_PRED (join
, 1)->src
;
1811 virt_bb
= gimple_bb (d1
);
1816 /* Second, we need to check that the basic blocks are laid out in the way
1817 corresponding to the pattern. */
1819 if (!single_pred_p (virt_bb
) || !single_succ_p (virt_bb
)
1820 || single_pred (virt_bb
) != bb
1821 || single_succ (virt_bb
) != join
)
1824 /* Third, let's see that the branching is done depending on the least
1825 significant bit of the pfn. */
1827 branch
= last_stmt (bb
)ed;
1828 if (!branch
|| gimple_code (branch
) != GIMPLE_COND
)
1831 if ((gimple_cond_code (branch
) != NE_EXPR
1832 && gimple_cond_code (branch
) != EQ_EXPR
)
1833 || !integer_zerop (gimple_cond_rhs (branch
)))
1836 cond
= gimple_cond_lhs (branch
);
1837 if (!ipa_is_ssa_with_stmt_def (cond
))
/* Walk the condition's def chain: expect (pfn & 1), possibly behind a
   conversion.  */
1840 def
= SSA_NAME_DEF_STMT (cond
);
1841 if (!is_gimple_assign (def
)
1842 || gimple_assign_rhs_code (def
) != BIT_AND_EXPR
1843 || !integer_onep (gimple_assign_rhs2 (def
)))
1846 cond
= gimple_assign_rhs1 (def
);
1847 if (!ipa_is_ssa_with_stmt_def (cond
))
1850 def
= SSA_NAME_DEF_STMT (cond
);
1852 if (is_gimple_assign (def
)
1853 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def
)))
1855 cond
= gimple_assign_rhs1 (def
);
1856 if (!ipa_is_ssa_with_stmt_def (cond
))
1858 def
= SSA_NAME_DEF_STMT (cond
);
/* The tested field is target-dependent: the vbit may live in the pfn
   or in the delta field.  */
1861 rec2
= ipa_get_stmt_member_ptr_load_param (def
,
1862 (TARGET_PTRMEMFUNC_VBIT_LOCATION
1863 == ptrmemfunc_vbit_in_delta
),
1868 index
= ipa_get_param_decl_index (info
, rec
);
1870 && parm_preserved_before_stmt_p (&parms_ainfo
[index
], call
, rec
))
1872 struct cgraph_edge
*cs
= ipa_note_param_call (node
, index
, call
);
1873 cs
->indirect_info
->offset
= offset
;
1874 cs
->indirect_info
->agg_contents
= 1;
1875 cs
->indirect_info
->member_ptr
= 1;
1881 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1882 object referenced in the expression is a formal parameter of the caller
1883 (described by INFO), create a call note for the statement. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1886 ipa_analyze_virtual_call_uses (struct cgraph_node
*node
,
1887 struct ipa_node_params
*info
, gimple call
,
1890 struct cgraph_edge
*cs
;
1891 struct cgraph_indirect_call_info
*ii
;
1892 struct ipa_jump_func jfunc
;
1893 tree obj
= OBJ_TYPE_REF_OBJECT (target
);
1895 HOST_WIDE_INT anc_offset
;
1897 if (!flag_devirtualize
)
1900 if (TREE_CODE (obj
) != SSA_NAME
)
/* Case 1: the object is the incoming value of a PARM_DECL, not
   changed before the call.  */
1903 if (SSA_NAME_IS_DEFAULT_DEF (obj
))
1905 if (TREE_CODE (SSA_NAME_VAR (obj
)) != PARM_DECL
)
1909 index
= ipa_get_param_decl_index (info
, SSA_NAME_VAR (obj
));
1910 gcc_assert (index
>= 0);
1911 if (detect_type_change_ssa (obj
, call
, &jfunc
))
/* Case 2: the object is a &parm->ancestor_field adjustment.  */
1916 gimple stmt
= SSA_NAME_DEF_STMT (obj
);
1919 expr
= get_ancestor_addr_info (stmt
, &obj
, &anc_offset
);
1922 index
= ipa_get_param_decl_index (info
,
1923 SSA_NAME_VAR (TREE_OPERAND (expr
, 0)));
1924 gcc_assert (index
>= 0);
1925 if (detect_type_change (obj
, expr
, call
, &jfunc
, anc_offset
))
/* Record a polymorphic indirect-call edge keyed by parameter index,
   offset within the object, OTR token and type.  */
1929 cs
= ipa_note_param_call (node
, index
, call
);
1930 ii
= cs
->indirect_info
;
1931 ii
->offset
= anc_offset
;
1932 ii
->otr_token
= tree_low_cst (OBJ_TYPE_REF_TOKEN (target
), 1);
1933 ii
->otr_type
= obj_type_ref_class (target
);
1934 ii
->polymorphic
= 1;
1937 /* Analyze a call statement CALL whether and how it utilizes formal parameters
1938 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
1939 containing intermediate information about each formal parameter. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical.  */
1942 ipa_analyze_call_uses (struct cgraph_node
*node
,
1943 struct ipa_node_params
*info
,
1944 struct param_analysis_info
*parms_ainfo
, gimple call
)
1946 tree target
= gimple_call_fn (call
)ed;
/* Dispatch: plain indirect calls through an SSA pointer vs. C++
   virtual calls through an OBJ_TYPE_REF.  */
1950 if (TREE_CODE (target
) == SSA_NAME
)
1951 ipa_analyze_indirect_call_uses (node
, info
, parms_ainfo
, call
, target
);
1952 else if (virtual_method_call_p (target
))
1953 ipa_analyze_virtual_call_uses (node
, info
, call
, target
);
1957 /* Analyze the call statement STMT with respect to formal parameters (described
1958 in INFO) of caller given by NODE. Currently it only checks whether formal
1959 parameters are called. PARMS_AINFO is a pointer to a vector containing
1960 intermediate information about each formal parameter. */
/* NOTE(review): extract omits interior lines; kept byte-identical.  */
1963 ipa_analyze_stmt_uses (struct cgraph_node
*node
, struct ipa_node_params
*info
,
1964 struct param_analysis_info
*parms_ainfo
, gimple stmt
)
/* Only call statements are interesting here.  */
1966 if (is_gimple_call (stmt
))
1967 ipa_analyze_call_uses (node
, info
, parms_ainfo
, stmt
);
1970 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1971 If OP is a parameter declaration, mark it as used in the info structure
/* NOTE(review): extract omits interior lines; kept byte-identical.
   DATA is the caller's ipa_node_params, passed through the walker.  */
1975 visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED
,
1976 tree op
, void *data
)
1978 struct ipa_node_params
*info
= (struct ipa_node_params
*) data
;
/* Strip handled components so a reference into a parameter aggregate
   still marks the parameter as used.  */
1980 op
= get_base_address (op
);
1982 && TREE_CODE (op
) == PARM_DECL
)
1984 int index
= ipa_get_param_decl_index (info
, op
);
1985 gcc_assert (index
>= 0);
1986 ipa_set_param_used (info
, index
, true);
1992 /* Scan the function body of NODE and inspect the uses of formal parameters.
1993 Store the findings in various structures of the associated ipa_node_params
1994 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
1995 vector containing intermediate information about each formal parameter. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical — verify against upstream ipa-prop.c.  */
1998 ipa_analyze_params_uses (struct cgraph_node
*node
,
1999 struct param_analysis_info
*parms_ainfo
)
2001 tree decl
= node
->symbol
.decl
;
2003 struct function
*func
;
2004 gimple_stmt_iterator gsi
;
2005 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
/* Nothing to do for parameterless functions or when the analysis has
   already run for this node.  */
2008 if (ipa_get_param_count (info
) == 0 || info
->uses_analysis_done
)
2011 info
->uses_analysis_done
= 1;
/* If specialization options forbid analysis, conservatively mark every
   parameter as used with undescribed uses.  */
2012 if (ipa_func_spec_opts_forbid_analysis_p (node
))
2014 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2016 ipa_set_param_used (info
, i
, true);
2017 ipa_set_controlled_uses (info
, i
, IPA_UNDESCRIBED_USE
);
2022 for (i
= 0; i
< ipa_get_param_count (info
); i
++)
2024 tree parm
= ipa_get_param (info
, i
);
2025 int controlled_uses
= 0;
2027 /* For SSA regs see if parameter is used. For non-SSA we compute
2028 the flag during modification analysis. */
2029 if (is_gimple_reg (parm
))
2031 tree ddef
= ssa_default_def (DECL_STRUCT_FUNCTION (node
->symbol
.decl
),
2033 if (ddef
&& !has_zero_uses (ddef
))
2035 imm_use_iterator imm_iter
;
2036 use_operand_p use_p
;
/* A use in a non-call statement means the uses cannot all be
   described by jump functions — mark them undescribed.  */
2038 ipa_set_param_used (info
, i
, true);
2039 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, ddef
)
2040 if (!is_gimple_call (USE_STMT (use_p
)))
2042 controlled_uses
= IPA_UNDESCRIBED_USE
;
2049 controlled_uses
= 0;
2052 controlled_uses
= IPA_UNDESCRIBED_USE
;
2053 ipa_set_controlled_uses (info
, i
, controlled_uses
);
/* Walk every statement and PHI, analyzing calls and recording any
   load/store/address reference to a parameter.  */
2056 func
= DECL_STRUCT_FUNCTION (decl
);
2057 FOR_EACH_BB_FN (bb
, func
)
2059 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2061 gimple stmt
= gsi_stmt (gsi
);
2063 if (is_gimple_debug (stmt
))
2066 ipa_analyze_stmt_uses (node
, info
, parms_ainfo
, stmt
);
2067 walk_stmt_load_store_addr_ops (stmt
, info
,
2068 visit_ref_for_mod_analysis
,
2069 visit_ref_for_mod_analysis
,
2070 visit_ref_for_mod_analysis
);
2072 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2073 walk_stmt_load_store_addr_ops (gsi_stmt (gsi
), info
,
2074 visit_ref_for_mod_analysis
,
2075 visit_ref_for_mod_analysis
,
2076 visit_ref_for_mod_analysis
);
2080 /* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters. */
/* NOTE(review): extract omits interior lines; kept byte-identical.
   Frees only the per-parameter visited-statement bitmaps; the array
   itself is stack-allocated by the caller (XALLOCAVEC).  */
2083 free_parms_ainfo (struct param_analysis_info
*parms_ainfo
, int param_count
)
2087 for (i
= 0; i
< param_count
; i
++)
2089 if (parms_ainfo
[i
].parm_visited_statements
)
2090 BITMAP_FREE (parms_ainfo
[i
].parm_visited_statements
);
2091 if (parms_ainfo
[i
].pt_visited_statements
)
2092 BITMAP_FREE (parms_ainfo
[i
].pt_visited_statements
);
2096 /* Initialize the array describing properties of of formal parameters
2097 of NODE, analyze their uses and compute jump functions associated
2098 with actual arguments of calls from within NODE. */
/* NOTE(review): extract omits interior lines (e.g. the matching pop_cfun
   for the push_cfun below is not visible); kept byte-identical.  */
2101 ipa_analyze_node (struct cgraph_node
*node
)
2103 struct ipa_node_params
*info
;
2104 struct param_analysis_info
*parms_ainfo
;
2107 ipa_check_create_node_params ();
2108 ipa_check_create_edge_args ();
2109 info
= IPA_NODE_REF (node
);
2110 push_cfun (DECL_STRUCT_FUNCTION (node
->symbol
.decl
));
2111 ipa_initialize_node_params (node
);
/* Scratch per-parameter info lives on the stack for the duration of
   the analysis and is zero-initialized.  */
2113 param_count
= ipa_get_param_count (info
);
2114 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
2115 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
2117 ipa_analyze_params_uses (node
, parms_ainfo
);
2118 ipa_compute_jump_functions (node
, parms_ainfo
);
2120 free_parms_ainfo (parms_ainfo
, param_count
);
2124 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2125 attempt a type-based devirtualization. If successful, return the
2126 target function declaration, otherwise return NULL. */
/* NOTE(review): extract omits interior lines (original numbering skips);
   kept byte-identical.  */
2129 ipa_intraprocedural_devirtualization (gimple call
)
2131 tree binfo
, token
, fndecl
;
2132 struct ipa_jump_func jfunc
;
2133 tree otr
= gimple_call_fn (call
);
/* Try to pin down the static type of the OBJ_TYPE_REF object; only a
   KNOWN_TYPE jump function allows a vtable lookup.  */
2135 jfunc
.type
= IPA_JF_UNKNOWN
;
2136 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr
), &jfunc
,
2138 if (jfunc
.type
!= IPA_JF_KNOWN_TYPE
)
2140 binfo
= ipa_binfo_from_known_type_jfunc (&jfunc
);
2143 token
= OBJ_TYPE_REF_TOKEN (otr
);
2144 fndecl
= gimple_get_virt_method_for_binfo (tree_low_cst (token
, 1),
2149 /* Update the jump function DST when the call graph edge corresponding to SRC is
2150 is being inlined, knowing that DST is of type ancestor and src of known
/* NOTE(review): extract omits interior lines; kept byte-identical.  */
2154 combine_known_type_and_ancestor_jfs (struct ipa_jump_func
*src
,
2155 struct ipa_jump_func
*dst
)
2157 HOST_WIDE_INT combined_offset
;
/* If the ancestor jump function does not preserve the dynamic type,
   the combination degrades to UNKNOWN.  */
2160 if (!ipa_get_jf_ancestor_type_preserved (dst
))
2162 dst
->type
= IPA_JF_UNKNOWN
;
/* Otherwise compose: known-type offset plus ancestor offset, with the
   ancestor's component type.  */
2166 combined_offset
= ipa_get_jf_known_type_offset (src
)
2167 + ipa_get_jf_ancestor_offset (dst
);
2168 combined_type
= ipa_get_jf_ancestor_type (dst
);
2170 ipa_set_jf_known_type (dst
, combined_offset
,
2171 ipa_get_jf_known_type_base_type (src
),
2175 /* Update the jump functions associated with call graph edge E when the call
2176 graph edge CS is being inlined, assuming that E->caller is already (possibly
2177 indirectly) inlined into CS->callee and that E has not been inlined. */
/* NOTE(review): extract omits interior lines (original numbering skips,
   including the switch head before the case labels below); kept
   byte-identical — verify against upstream ipa-prop.c.  */
2180 update_jump_functions_after_inlining (struct cgraph_edge
*cs
,
2181 struct cgraph_edge
*e
)
2183 struct ipa_edge_args
*top
= IPA_EDGE_REF (cs
);
2184 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
2185 int count
= ipa_get_cs_argument_count (args
);
2188 for (i
= 0; i
< count
; i
++)
2190 struct ipa_jump_func
*dst
= ipa_get_ith_jump_func (args
, i
);
/* Compose an ANCESTOR jump function of E with the corresponding jump
   function of the being-inlined edge CS.  */
2192 if (dst
->type
== IPA_JF_ANCESTOR
)
2194 struct ipa_jump_func
*src
;
2195 int dst_fid
= dst
->value
.ancestor
.formal_id
;
2197 /* Variable number of arguments can cause havoc if we try to access
2198 one that does not exist in the inlined edge. So make sure we
2200 if (dst_fid
>= ipa_get_cs_argument_count (top
))
2202 dst
->type
= IPA_JF_UNKNOWN
;
2206 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2209 && (dst
->value
.ancestor
.agg_preserved
|| !src
->agg
.by_ref
))
2211 struct ipa_agg_jf_item
*item
;
2214 /* Currently we do not produce clobber aggregate jump functions,
2215 replace with merging when we do. */
2216 gcc_assert (!dst
->agg
.items
);
/* Copy the aggregate items, rebasing their offsets by the
   ancestor offset.  */
2218 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2219 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2220 FOR_EACH_VEC_SAFE_ELT (dst
->agg
.items
, j
, item
)
2221 item
->offset
-= dst
->value
.ancestor
.offset
;
2224 if (src
->type
== IPA_JF_KNOWN_TYPE
)
2225 combine_known_type_and_ancestor_jfs (src
, dst
);
2226 else if (src
->type
== IPA_JF_PASS_THROUGH
2227 && src
->value
.pass_through
.operation
== NOP_EXPR
)
2229 dst
->value
.ancestor
.formal_id
= src
->value
.pass_through
.formal_id
;
2230 dst
->value
.ancestor
.agg_preserved
&=
2231 src
->value
.pass_through
.agg_preserved
;
2232 dst
->value
.ancestor
.type_preserved
&=
2233 src
->value
.pass_through
.type_preserved
;
/* Ancestor composed with ancestor: formal id comes from SRC,
   offsets add, preservation flags AND together.  */
2235 else if (src
->type
== IPA_JF_ANCESTOR
)
2237 dst
->value
.ancestor
.formal_id
= src
->value
.ancestor
.formal_id
;
2238 dst
->value
.ancestor
.offset
+= src
->value
.ancestor
.offset
;
2239 dst
->value
.ancestor
.agg_preserved
&=
2240 src
->value
.ancestor
.agg_preserved
;
2241 dst
->value
.ancestor
.type_preserved
&=
2242 src
->value
.ancestor
.type_preserved
;
2245 dst
->type
= IPA_JF_UNKNOWN
;
/* Compose a PASS_THROUGH jump function of E with the corresponding
   jump function of CS; only NOP pass-throughs can be combined.  */
2247 else if (dst
->type
== IPA_JF_PASS_THROUGH
)
2249 struct ipa_jump_func
*src
;
2250 /* We must check range due to calls with variable number of arguments
2251 and we cannot combine jump functions with operations. */
2252 if (dst
->value
.pass_through
.operation
== NOP_EXPR
2253 && (dst
->value
.pass_through
.formal_id
2254 < ipa_get_cs_argument_count (top
)))
2256 int dst_fid
= dst
->value
.pass_through
.formal_id
;
2257 src
= ipa_get_ith_jump_func (top
, dst_fid
);
2258 bool dst_agg_p
= ipa_get_jf_pass_through_agg_preserved (dst
);
2262 case IPA_JF_UNKNOWN
:
2263 dst
->type
= IPA_JF_UNKNOWN
;
2265 case IPA_JF_KNOWN_TYPE
:
2266 ipa_set_jf_known_type (dst
,
2267 ipa_get_jf_known_type_offset (src
),
2268 ipa_get_jf_known_type_base_type (src
),
2269 ipa_get_jf_known_type_base_type (src
));
2272 ipa_set_jf_cst_copy (dst
, src
);
2275 case IPA_JF_PASS_THROUGH
:
2277 int formal_id
= ipa_get_jf_pass_through_formal_id (src
);
2278 enum tree_code operation
;
2279 operation
= ipa_get_jf_pass_through_operation (src
);
2281 if (operation
== NOP_EXPR
)
2285 && ipa_get_jf_pass_through_agg_preserved (src
);
2286 type_p
= ipa_get_jf_pass_through_type_preserved (src
)
2287 && ipa_get_jf_pass_through_type_preserved (dst
);
2288 ipa_set_jf_simple_pass_through (dst
, formal_id
,
2293 tree operand
= ipa_get_jf_pass_through_operand (src
);
2294 ipa_set_jf_arith_pass_through (dst
, formal_id
, operand
,
2299 case IPA_JF_ANCESTOR
:
2303 && ipa_get_jf_ancestor_agg_preserved (src
);
2304 type_p
= ipa_get_jf_ancestor_type_preserved (src
)
2305 && ipa_get_jf_pass_through_type_preserved (dst
);
2306 ipa_set_ancestor_jf (dst
,
2307 ipa_get_jf_ancestor_offset (src
),
2308 ipa_get_jf_ancestor_type (src
),
2309 ipa_get_jf_ancestor_formal_id (src
),
2318 && (dst_agg_p
|| !src
->agg
.by_ref
))
2320 /* Currently we do not produce clobber aggregate jump
2321 functions, replace with merging when we do. */
2322 gcc_assert (!dst
->agg
.items
);
2324 dst
->agg
.by_ref
= src
->agg
.by_ref
;
2325 dst
->agg
.items
= vec_safe_copy (src
->agg
.items
);
2329 dst
->type
= IPA_JF_UNKNOWN
;
2334 /* If TARGET is an addr_expr of a function declaration, make it the destination
2335 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2337 struct cgraph_edge
*
2338 ipa_make_edge_direct_to_target (struct cgraph_edge
*ie
, tree target
)
2340 struct cgraph_node
*callee
;
2341 struct inline_edge_summary
*es
= inline_edge_summary (ie
);
2342 bool unreachable
= false;
2344 if (TREE_CODE (target
) == ADDR_EXPR
)
2345 target
= TREE_OPERAND (target
, 0);
2346 if (TREE_CODE (target
) != FUNCTION_DECL
)
2348 target
= canonicalize_constructor_val (target
, NULL
);
2349 if (!target
|| TREE_CODE (target
) != FUNCTION_DECL
)
2351 if (ie
->indirect_info
->member_ptr
)
2352 /* Member pointer call that goes through a VMT lookup. */
2356 fprintf (dump_file
, "ipa-prop: Discovered direct call to non-function"
2357 " in %s/%i, making it unreachable.\n",
2358 cgraph_node_name (ie
->caller
), ie
->caller
->symbol
.order
);
2359 target
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2360 callee
= cgraph_get_create_node (target
);
2364 callee
= cgraph_get_node (target
);
2367 callee
= cgraph_get_node (target
);
2369 /* Because may-edges are not explicitely represented and vtable may be external,
2370 we may create the first reference to the object in the unit. */
2371 if (!callee
|| callee
->global
.inlined_to
)
2374 /* We are better to ensure we can refer to it.
2375 In the case of static functions we are out of luck, since we already
2376 removed its body. In the case of public functions we may or may
2377 not introduce the reference. */
2378 if (!canonicalize_constructor_val (target
, NULL
)
2379 || !TREE_PUBLIC (target
))
2382 fprintf (dump_file
, "ipa-prop: Discovered call to a known target "
2383 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2384 xstrdup (cgraph_node_name (ie
->caller
)),
2385 ie
->caller
->symbol
.order
,
2386 xstrdup (cgraph_node_name (ie
->callee
)),
2387 ie
->callee
->symbol
.order
);
2390 callee
= cgraph_get_create_real_symbol_node (target
);
2392 ipa_check_create_node_params ();
2394 /* We can not make edges to inline clones. It is bug that someone removed
2395 the cgraph node too early. */
2396 gcc_assert (!callee
->global
.inlined_to
);
2398 if (dump_file
&& !unreachable
)
2400 fprintf (dump_file
, "ipa-prop: Discovered %s call to a known target "
2401 "(%s/%i -> %s/%i), for stmt ",
2402 ie
->indirect_info
->polymorphic
? "a virtual" : "an indirect",
2403 xstrdup (cgraph_node_name (ie
->caller
)),
2404 ie
->caller
->symbol
.order
,
2405 xstrdup (cgraph_node_name (callee
)),
2406 callee
->symbol
.order
);
2408 print_gimple_stmt (dump_file
, ie
->call_stmt
, 2, TDF_SLIM
);
2410 fprintf (dump_file
, "with uid %i\n", ie
->lto_stmt_uid
);
2412 ie
= cgraph_make_edge_direct (ie
, callee
);
2413 es
= inline_edge_summary (ie
);
2414 es
->call_stmt_size
-= (eni_size_weights
.indirect_call_cost
2415 - eni_size_weights
.call_cost
);
2416 es
->call_stmt_time
-= (eni_time_weights
.indirect_call_cost
2417 - eni_time_weights
.call_cost
);
2422 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2423 return NULL if there is not any. BY_REF specifies whether the value has to
2424 be passed by reference or by value. */
2427 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function
*agg
,
2428 HOST_WIDE_INT offset
, bool by_ref
)
2430 struct ipa_agg_jf_item
*item
;
2433 if (by_ref
!= agg
->by_ref
)
2436 FOR_EACH_VEC_SAFE_ELT (agg
->items
, i
, item
)
2437 if (item
->offset
== offset
)
2439 /* Currently we do not have clobber values, return NULL for them once
2441 gcc_checking_assert (is_gimple_ip_invariant (item
->value
));
2447 /* Remove a reference to SYMBOL from the list of references of a node given by
2448 reference description RDESC. */
2451 remove_described_reference (symtab_node symbol
, struct ipa_cst_ref_desc
*rdesc
)
2453 struct ipa_ref
*to_del
;
2454 struct cgraph_edge
*origin
;
2457 to_del
= ipa_find_reference ((symtab_node
) origin
->caller
, symbol
,
2458 origin
->call_stmt
, origin
->lto_stmt_uid
);
2459 gcc_assert (to_del
);
2460 ipa_remove_reference (to_del
);
2462 fprintf (dump_file
, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2463 xstrdup (cgraph_node_name (origin
->caller
)),
2464 origin
->caller
->symbol
.order
, xstrdup (symtab_node_name (symbol
)));
2467 /* If JFUNC has a reference description with refcount different from
2468 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2469 NULL. JFUNC must be a constant jump function. */
2471 static struct ipa_cst_ref_desc
*
2472 jfunc_rdesc_usable (struct ipa_jump_func
*jfunc
)
2474 struct ipa_cst_ref_desc
*rdesc
= ipa_get_jf_constant_rdesc (jfunc
);
2475 if (rdesc
&& rdesc
->refcount
!= IPA_UNDESCRIBED_USE
)
2481 /* Try to find a destination for indirect edge IE that corresponds to a simple
2482 call or a call of a member function pointer and where the destination is a
2483 pointer formal parameter described by jump function JFUNC. If it can be
2484 determined, return the newly direct edge, otherwise return NULL.
2485 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2487 static struct cgraph_edge
*
2488 try_make_edge_direct_simple_call (struct cgraph_edge
*ie
,
2489 struct ipa_jump_func
*jfunc
,
2490 struct ipa_node_params
*new_root_info
)
2492 struct cgraph_edge
*cs
;
2494 bool agg_contents
= ie
->indirect_info
->agg_contents
;
2495 bool speculative
= ie
->speculative
;
2496 struct ipa_cst_ref_desc
*rdesc
;
2498 if (ie
->indirect_info
->agg_contents
)
2499 target
= ipa_find_agg_cst_for_param (&jfunc
->agg
,
2500 ie
->indirect_info
->offset
,
2501 ie
->indirect_info
->by_ref
);
2503 target
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2506 cs
= ipa_make_edge_direct_to_target (ie
, target
);
2508 /* FIXME: speculative edges can be handled. */
2509 if (cs
&& !agg_contents
&& !speculative
2510 && jfunc
->type
== IPA_JF_CONST
2511 && (rdesc
= jfunc_rdesc_usable (jfunc
))
2512 && --rdesc
->refcount
== 0)
2513 remove_described_reference ((symtab_node
) cs
->callee
, rdesc
);
2518 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2519 call based on a formal parameter which is described by jump function JFUNC
2520 and if it can be determined, make it direct and return the direct edge.
2521 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2524 static struct cgraph_edge
*
2525 try_make_edge_direct_virtual_call (struct cgraph_edge
*ie
,
2526 struct ipa_jump_func
*jfunc
,
2527 struct ipa_node_params
*new_root_info
)
2531 binfo
= ipa_value_from_jfunc (new_root_info
, jfunc
);
2536 if (TREE_CODE (binfo
) != TREE_BINFO
)
2538 binfo
= gimple_extract_devirt_binfo_from_cst
2539 (binfo
, ie
->indirect_info
->otr_type
);
2544 binfo
= get_binfo_at_offset (binfo
, ie
->indirect_info
->offset
,
2545 ie
->indirect_info
->otr_type
);
2547 target
= gimple_get_virt_method_for_binfo (ie
->indirect_info
->otr_token
,
2553 return ipa_make_edge_direct_to_target (ie
, target
);
2558 /* Update the param called notes associated with NODE when CS is being inlined,
2559 assuming NODE is (potentially indirectly) inlined into CS->callee.
2560 Moreover, if the callee is discovered to be constant, create a new cgraph
2561 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2562 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
2565 update_indirect_edges_after_inlining (struct cgraph_edge
*cs
,
2566 struct cgraph_node
*node
,
2567 vec
<cgraph_edge_p
> *new_edges
)
2569 struct ipa_edge_args
*top
;
2570 struct cgraph_edge
*ie
, *next_ie
, *new_direct_edge
;
2571 struct ipa_node_params
*new_root_info
;
2574 ipa_check_create_edge_args ();
2575 top
= IPA_EDGE_REF (cs
);
2576 new_root_info
= IPA_NODE_REF (cs
->caller
->global
.inlined_to
2577 ? cs
->caller
->global
.inlined_to
2580 for (ie
= node
->indirect_calls
; ie
; ie
= next_ie
)
2582 struct cgraph_indirect_call_info
*ici
= ie
->indirect_info
;
2583 struct ipa_jump_func
*jfunc
;
2586 next_ie
= ie
->next_callee
;
2588 if (ici
->param_index
== -1)
2591 /* We must check range due to calls with variable number of arguments: */
2592 if (ici
->param_index
>= ipa_get_cs_argument_count (top
))
2594 ici
->param_index
= -1;
2598 param_index
= ici
->param_index
;
2599 jfunc
= ipa_get_ith_jump_func (top
, param_index
);
2601 if (!flag_indirect_inlining
)
2602 new_direct_edge
= NULL
;
2603 else if (ici
->polymorphic
)
2604 new_direct_edge
= try_make_edge_direct_virtual_call (ie
, jfunc
,
2607 new_direct_edge
= try_make_edge_direct_simple_call (ie
, jfunc
,
2609 /* If speculation was removed, then we need to do nothing. */
2610 if (new_direct_edge
&& new_direct_edge
!= ie
)
2612 new_direct_edge
->indirect_inlining_edge
= 1;
2613 top
= IPA_EDGE_REF (cs
);
2616 else if (new_direct_edge
)
2618 new_direct_edge
->indirect_inlining_edge
= 1;
2619 if (new_direct_edge
->call_stmt
)
2620 new_direct_edge
->call_stmt_cannot_inline_p
2621 = !gimple_check_call_matching_types (
2622 new_direct_edge
->call_stmt
,
2623 new_direct_edge
->callee
->symbol
.decl
, false);
2626 new_edges
->safe_push (new_direct_edge
);
2629 top
= IPA_EDGE_REF (cs
);
2631 else if (jfunc
->type
== IPA_JF_PASS_THROUGH
2632 && ipa_get_jf_pass_through_operation (jfunc
) == NOP_EXPR
)
2634 if (ici
->agg_contents
2635 && !ipa_get_jf_pass_through_agg_preserved (jfunc
))
2636 ici
->param_index
= -1;
2638 ici
->param_index
= ipa_get_jf_pass_through_formal_id (jfunc
);
2640 else if (jfunc
->type
== IPA_JF_ANCESTOR
)
2642 if (ici
->agg_contents
2643 && !ipa_get_jf_ancestor_agg_preserved (jfunc
))
2644 ici
->param_index
= -1;
2647 ici
->param_index
= ipa_get_jf_ancestor_formal_id (jfunc
);
2648 ici
->offset
+= ipa_get_jf_ancestor_offset (jfunc
);
2652 /* Either we can find a destination for this edge now or never. */
2653 ici
->param_index
= -1;
2659 /* Recursively traverse subtree of NODE (including node) made of inlined
2660 cgraph_edges when CS has been inlined and invoke
2661 update_indirect_edges_after_inlining on all nodes and
2662 update_jump_functions_after_inlining on all non-inlined edges that lead out
2663 of this subtree. Newly discovered indirect edges will be added to
2664 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
2668 propagate_info_to_inlined_callees (struct cgraph_edge
*cs
,
2669 struct cgraph_node
*node
,
2670 vec
<cgraph_edge_p
> *new_edges
)
2672 struct cgraph_edge
*e
;
2675 res
= update_indirect_edges_after_inlining (cs
, node
, new_edges
);
2677 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2678 if (!e
->inline_failed
)
2679 res
|= propagate_info_to_inlined_callees (cs
, e
->callee
, new_edges
);
2681 update_jump_functions_after_inlining (cs
, e
);
2682 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
2683 update_jump_functions_after_inlining (cs
, e
);
2688 /* Combine two controlled uses counts as done during inlining. */
2691 combine_controlled_uses_counters (int c
, int d
)
2693 if (c
== IPA_UNDESCRIBED_USE
|| d
== IPA_UNDESCRIBED_USE
)
2694 return IPA_UNDESCRIBED_USE
;
2699 /* Propagate number of controlled users from CS->caleee to the new root of the
2700 tree of inlined nodes. */
2703 propagate_controlled_uses (struct cgraph_edge
*cs
)
2705 struct ipa_edge_args
*args
= IPA_EDGE_REF (cs
);
2706 struct cgraph_node
*new_root
= cs
->caller
->global
.inlined_to
2707 ? cs
->caller
->global
.inlined_to
: cs
->caller
;
2708 struct ipa_node_params
*new_root_info
= IPA_NODE_REF (new_root
);
2709 struct ipa_node_params
*old_root_info
= IPA_NODE_REF (cs
->callee
);
2712 count
= MIN (ipa_get_cs_argument_count (args
),
2713 ipa_get_param_count (old_root_info
));
2714 for (i
= 0; i
< count
; i
++)
2716 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2717 struct ipa_cst_ref_desc
*rdesc
;
2719 if (jf
->type
== IPA_JF_PASS_THROUGH
)
2722 src_idx
= ipa_get_jf_pass_through_formal_id (jf
);
2723 c
= ipa_get_controlled_uses (new_root_info
, src_idx
);
2724 d
= ipa_get_controlled_uses (old_root_info
, i
);
2726 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf
)
2727 == NOP_EXPR
|| c
== IPA_UNDESCRIBED_USE
);
2728 c
= combine_controlled_uses_counters (c
, d
);
2729 ipa_set_controlled_uses (new_root_info
, src_idx
, c
);
2730 if (c
== 0 && new_root_info
->ipcp_orig_node
)
2732 struct cgraph_node
*n
;
2733 struct ipa_ref
*ref
;
2734 tree t
= new_root_info
->known_vals
[src_idx
];
2736 if (t
&& TREE_CODE (t
) == ADDR_EXPR
2737 && TREE_CODE (TREE_OPERAND (t
, 0)) == FUNCTION_DECL
2738 && (n
= cgraph_get_node (TREE_OPERAND (t
, 0)))
2739 && (ref
= ipa_find_reference ((symtab_node
) new_root
,
2740 (symtab_node
) n
, NULL
, 0)))
2743 fprintf (dump_file
, "ipa-prop: Removing cloning-created "
2744 "reference from %s/%i to %s/%i.\n",
2745 xstrdup (cgraph_node_name (new_root
)),
2746 new_root
->symbol
.order
,
2747 xstrdup (cgraph_node_name (n
)), n
->symbol
.order
);
2748 ipa_remove_reference (ref
);
2752 else if (jf
->type
== IPA_JF_CONST
2753 && (rdesc
= jfunc_rdesc_usable (jf
)))
2755 int d
= ipa_get_controlled_uses (old_root_info
, i
);
2756 int c
= rdesc
->refcount
;
2757 rdesc
->refcount
= combine_controlled_uses_counters (c
, d
);
2758 if (rdesc
->refcount
== 0)
2760 tree cst
= ipa_get_jf_constant (jf
);
2761 struct cgraph_node
*n
;
2762 gcc_checking_assert (TREE_CODE (cst
) == ADDR_EXPR
2763 && TREE_CODE (TREE_OPERAND (cst
, 0))
2765 n
= cgraph_get_node (TREE_OPERAND (cst
, 0));
2768 struct cgraph_node
*clone
;
2769 remove_described_reference ((symtab_node
) n
, rdesc
);
2772 while (clone
->global
.inlined_to
2773 && clone
!= rdesc
->cs
->caller
2774 && IPA_NODE_REF (clone
)->ipcp_orig_node
)
2776 struct ipa_ref
*ref
;
2777 ref
= ipa_find_reference ((symtab_node
) clone
,
2778 (symtab_node
) n
, NULL
, 0);
2782 fprintf (dump_file
, "ipa-prop: Removing "
2783 "cloning-created reference "
2784 "from %s/%i to %s/%i.\n",
2785 xstrdup (cgraph_node_name (clone
)),
2786 clone
->symbol
.order
,
2787 xstrdup (cgraph_node_name (n
)),
2789 ipa_remove_reference (ref
);
2791 clone
= clone
->callers
->caller
;
2798 for (i
= ipa_get_param_count (old_root_info
);
2799 i
< ipa_get_cs_argument_count (args
);
2802 struct ipa_jump_func
*jf
= ipa_get_ith_jump_func (args
, i
);
2804 if (jf
->type
== IPA_JF_CONST
)
2806 struct ipa_cst_ref_desc
*rdesc
= jfunc_rdesc_usable (jf
);
2808 rdesc
->refcount
= IPA_UNDESCRIBED_USE
;
2810 else if (jf
->type
== IPA_JF_PASS_THROUGH
)
2811 ipa_set_controlled_uses (new_root_info
,
2812 jf
->value
.pass_through
.formal_id
,
2813 IPA_UNDESCRIBED_USE
);
2817 /* Update jump functions and call note functions on inlining the call site CS.
2818 CS is expected to lead to a node already cloned by
2819 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
2820 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
2824 ipa_propagate_indirect_call_infos (struct cgraph_edge
*cs
,
2825 vec
<cgraph_edge_p
> *new_edges
)
2828 /* Do nothing if the preparation phase has not been carried out yet
2829 (i.e. during early inlining). */
2830 if (!ipa_node_params_vector
.exists ())
2832 gcc_assert (ipa_edge_args_vector
);
2834 propagate_controlled_uses (cs
);
2835 changed
= propagate_info_to_inlined_callees (cs
, cs
->callee
, new_edges
);
2840 /* Frees all dynamically allocated structures that the argument info points
2844 ipa_free_edge_args_substructures (struct ipa_edge_args
*args
)
2846 vec_free (args
->jump_functions
);
2847 memset (args
, 0, sizeof (*args
));
2850 /* Free all ipa_edge structures. */
2853 ipa_free_all_edge_args (void)
2856 struct ipa_edge_args
*args
;
2858 if (!ipa_edge_args_vector
)
2861 FOR_EACH_VEC_ELT (*ipa_edge_args_vector
, i
, args
)
2862 ipa_free_edge_args_substructures (args
);
2864 vec_free (ipa_edge_args_vector
);
2867 /* Frees all dynamically allocated structures that the param info points
2871 ipa_free_node_params_substructures (struct ipa_node_params
*info
)
2873 info
->descriptors
.release ();
2874 free (info
->lattices
);
2875 /* Lattice values and their sources are deallocated with their alocation
2877 info
->known_vals
.release ();
2878 memset (info
, 0, sizeof (*info
));
2881 /* Free all ipa_node_params structures. */
2884 ipa_free_all_node_params (void)
2887 struct ipa_node_params
*info
;
2889 FOR_EACH_VEC_ELT (ipa_node_params_vector
, i
, info
)
2890 ipa_free_node_params_substructures (info
);
2892 ipa_node_params_vector
.release ();
2895 /* Set the aggregate replacements of NODE to be AGGVALS. */
2898 ipa_set_node_agg_value_chain (struct cgraph_node
*node
,
2899 struct ipa_agg_replacement_value
*aggvals
)
2901 if (vec_safe_length (ipa_node_agg_replacements
) <= (unsigned) cgraph_max_uid
)
2902 vec_safe_grow_cleared (ipa_node_agg_replacements
, cgraph_max_uid
+ 1);
2904 (*ipa_node_agg_replacements
)[node
->uid
] = aggvals
;
2907 /* Hook that is called by cgraph.c when an edge is removed. */
2910 ipa_edge_removal_hook (struct cgraph_edge
*cs
, void *data ATTRIBUTE_UNUSED
)
2912 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2913 if (vec_safe_length (ipa_edge_args_vector
) <= (unsigned)cs
->uid
)
2915 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs
));
2918 /* Hook that is called by cgraph.c when a node is removed. */
2921 ipa_node_removal_hook (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
2923 /* During IPA-CP updating we can be called on not-yet analyze clones. */
2924 if (ipa_node_params_vector
.length () > (unsigned)node
->uid
)
2925 ipa_free_node_params_substructures (IPA_NODE_REF (node
));
2926 if (vec_safe_length (ipa_node_agg_replacements
) > (unsigned)node
->uid
)
2927 (*ipa_node_agg_replacements
)[(unsigned)node
->uid
] = NULL
;
2930 /* Hook that is called by cgraph.c when an edge is duplicated. */
2933 ipa_edge_duplication_hook (struct cgraph_edge
*src
, struct cgraph_edge
*dst
,
2934 __attribute__((unused
)) void *data
)
2936 struct ipa_edge_args
*old_args
, *new_args
;
2939 ipa_check_create_edge_args ();
2941 old_args
= IPA_EDGE_REF (src
);
2942 new_args
= IPA_EDGE_REF (dst
);
2944 new_args
->jump_functions
= vec_safe_copy (old_args
->jump_functions
);
2946 for (i
= 0; i
< vec_safe_length (old_args
->jump_functions
); i
++)
2948 struct ipa_jump_func
*src_jf
= ipa_get_ith_jump_func (old_args
, i
);
2949 struct ipa_jump_func
*dst_jf
= ipa_get_ith_jump_func (new_args
, i
);
2951 dst_jf
->agg
.items
= vec_safe_copy (dst_jf
->agg
.items
);
2953 if (src_jf
->type
== IPA_JF_CONST
)
2955 struct ipa_cst_ref_desc
*src_rdesc
= jfunc_rdesc_usable (src_jf
);
2958 dst_jf
->value
.constant
.rdesc
= NULL
;
2959 else if (src_rdesc
->cs
== src
)
2961 struct ipa_cst_ref_desc
*dst_rdesc
;
2962 gcc_checking_assert (ipa_refdesc_pool
);
2964 = (struct ipa_cst_ref_desc
*) pool_alloc (ipa_refdesc_pool
);
2965 dst_rdesc
->cs
= dst
;
2966 dst_rdesc
->refcount
= src_rdesc
->refcount
;
2967 if (dst
->caller
->global
.inlined_to
)
2969 dst_rdesc
->next_duplicate
= src_rdesc
->next_duplicate
;
2970 src_rdesc
->next_duplicate
= dst_rdesc
;
2972 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
2976 struct ipa_cst_ref_desc
*dst_rdesc
;
2977 /* This can happen during inlining, when a JFUNC can refer to a
2978 reference taken in a function up in the tree of inline clones.
2979 We need to find the duplicate that refers to our tree of
2982 gcc_assert (dst
->caller
->global
.inlined_to
);
2983 for (dst_rdesc
= src_rdesc
->next_duplicate
;
2985 dst_rdesc
= dst_rdesc
->next_duplicate
)
2986 if (dst_rdesc
->cs
->caller
->global
.inlined_to
2987 == dst
->caller
->global
.inlined_to
)
2989 gcc_assert (dst_rdesc
);
2990 dst_jf
->value
.constant
.rdesc
= dst_rdesc
;
2996 /* Hook that is called by cgraph.c when a node is duplicated. */
2999 ipa_node_duplication_hook (struct cgraph_node
*src
, struct cgraph_node
*dst
,
3000 ATTRIBUTE_UNUSED
void *data
)
3002 struct ipa_node_params
*old_info
, *new_info
;
3003 struct ipa_agg_replacement_value
*old_av
, *new_av
;
3005 ipa_check_create_node_params ();
3006 old_info
= IPA_NODE_REF (src
);
3007 new_info
= IPA_NODE_REF (dst
);
3009 new_info
->descriptors
= old_info
->descriptors
.copy ();
3010 new_info
->lattices
= NULL
;
3011 new_info
->ipcp_orig_node
= old_info
->ipcp_orig_node
;
3013 new_info
->uses_analysis_done
= old_info
->uses_analysis_done
;
3014 new_info
->node_enqueued
= old_info
->node_enqueued
;
3016 old_av
= ipa_get_agg_replacements_for_node (src
);
3023 struct ipa_agg_replacement_value
*v
;
3025 v
= ggc_alloc_ipa_agg_replacement_value ();
3026 memcpy (v
, old_av
, sizeof (*v
));
3029 old_av
= old_av
->next
;
3031 ipa_set_node_agg_value_chain (dst
, new_av
);
3035 /* Analyze newly added function into callgraph. */
3038 ipa_add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
3040 ipa_analyze_node (node
);
3043 /* Register our cgraph hooks if they are not already there. */
3046 ipa_register_cgraph_hooks (void)
3048 if (!edge_removal_hook_holder
)
3049 edge_removal_hook_holder
=
3050 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook
, NULL
);
3051 if (!node_removal_hook_holder
)
3052 node_removal_hook_holder
=
3053 cgraph_add_node_removal_hook (&ipa_node_removal_hook
, NULL
);
3054 if (!edge_duplication_hook_holder
)
3055 edge_duplication_hook_holder
=
3056 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook
, NULL
);
3057 if (!node_duplication_hook_holder
)
3058 node_duplication_hook_holder
=
3059 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook
, NULL
);
3060 function_insertion_hook_holder
=
3061 cgraph_add_function_insertion_hook (&ipa_add_new_function
, NULL
);
3064 /* Unregister our cgraph hooks if they are not already there. */
3067 ipa_unregister_cgraph_hooks (void)
3069 cgraph_remove_edge_removal_hook (edge_removal_hook_holder
);
3070 edge_removal_hook_holder
= NULL
;
3071 cgraph_remove_node_removal_hook (node_removal_hook_holder
);
3072 node_removal_hook_holder
= NULL
;
3073 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder
);
3074 edge_duplication_hook_holder
= NULL
;
3075 cgraph_remove_node_duplication_hook (node_duplication_hook_holder
);
3076 node_duplication_hook_holder
= NULL
;
3077 cgraph_remove_function_insertion_hook (function_insertion_hook_holder
);
3078 function_insertion_hook_holder
= NULL
;
3081 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3082 longer needed after ipa-cp. */
3085 ipa_free_all_structures_after_ipa_cp (void)
3089 ipa_free_all_edge_args ();
3090 ipa_free_all_node_params ();
3091 free_alloc_pool (ipcp_sources_pool
);
3092 free_alloc_pool (ipcp_values_pool
);
3093 free_alloc_pool (ipcp_agg_lattice_pool
);
3094 ipa_unregister_cgraph_hooks ();
3095 if (ipa_refdesc_pool
)
3096 free_alloc_pool (ipa_refdesc_pool
);
3100 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3101 longer needed after indirect inlining. */
3104 ipa_free_all_structures_after_iinln (void)
3106 ipa_free_all_edge_args ();
3107 ipa_free_all_node_params ();
3108 ipa_unregister_cgraph_hooks ();
3109 if (ipcp_sources_pool
)
3110 free_alloc_pool (ipcp_sources_pool
);
3111 if (ipcp_values_pool
)
3112 free_alloc_pool (ipcp_values_pool
);
3113 if (ipcp_agg_lattice_pool
)
3114 free_alloc_pool (ipcp_agg_lattice_pool
);
3115 if (ipa_refdesc_pool
)
3116 free_alloc_pool (ipa_refdesc_pool
);
3119 /* Print ipa_tree_map data structures of all functions in the
3123 ipa_print_node_params (FILE *f
, struct cgraph_node
*node
)
3126 struct ipa_node_params
*info
;
3128 if (!node
->symbol
.definition
)
3130 info
= IPA_NODE_REF (node
);
3131 fprintf (f
, " function %s/%i parameter descriptors:\n",
3132 cgraph_node_name (node
), node
->symbol
.order
);
3133 count
= ipa_get_param_count (info
);
3134 for (i
= 0; i
< count
; i
++)
3138 ipa_dump_param (f
, info
, i
);
3139 if (ipa_is_param_used (info
, i
))
3140 fprintf (f
, " used");
3141 c
= ipa_get_controlled_uses (info
, i
);
3142 if (c
== IPA_UNDESCRIBED_USE
)
3143 fprintf (f
, " undescribed_use");
3145 fprintf (f
, " controlled_uses=%i", c
);
3150 /* Print ipa_tree_map data structures of all functions in the
3154 ipa_print_all_params (FILE * f
)
3156 struct cgraph_node
*node
;
3158 fprintf (f
, "\nFunction parameters:\n");
3159 FOR_EACH_FUNCTION (node
)
3160 ipa_print_node_params (f
, node
);
3163 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3166 ipa_get_vector_of_formal_parms (tree fndecl
)
3172 gcc_assert (!flag_wpa
);
3173 count
= count_formal_params (fndecl
);
3174 args
.create (count
);
3175 for (parm
= DECL_ARGUMENTS (fndecl
); parm
; parm
= DECL_CHAIN (parm
))
3176 args
.quick_push (parm
);
3181 /* Return a heap allocated vector containing types of formal parameters of
3182 function type FNTYPE. */
3184 static inline vec
<tree
>
3185 get_vector_of_formal_parm_types (tree fntype
)
3191 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3194 types
.create (count
);
3195 for (t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
3196 types
.quick_push (TREE_VALUE (t
));
3201 /* Modify the function declaration FNDECL and its type according to the plan in
3202 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3203 to reflect the actual parameters being modified which are determined by the
3204 base_index field. */
3207 ipa_modify_formal_parameters (tree fndecl
, ipa_parm_adjustment_vec adjustments
,
3208 const char *synth_parm_prefix
)
3210 vec
<tree
> oparms
, otypes
;
3211 tree orig_type
, new_type
= NULL
;
3212 tree old_arg_types
, t
, new_arg_types
= NULL
;
3213 tree parm
, *link
= &DECL_ARGUMENTS (fndecl
);
3214 int i
, len
= adjustments
.length ();
3215 tree new_reversed
= NULL
;
3216 bool care_for_types
, last_parm_void
;
3218 if (!synth_parm_prefix
)
3219 synth_parm_prefix
= "SYNTH";
3221 oparms
= ipa_get_vector_of_formal_parms (fndecl
);
3222 orig_type
= TREE_TYPE (fndecl
);
3223 old_arg_types
= TYPE_ARG_TYPES (orig_type
);
3225 /* The following test is an ugly hack, some functions simply don't have any
3226 arguments in their type. This is probably a bug but well... */
3227 care_for_types
= (old_arg_types
!= NULL_TREE
);
3230 last_parm_void
= (TREE_VALUE (tree_last (old_arg_types
))
3232 otypes
= get_vector_of_formal_parm_types (orig_type
);
3234 gcc_assert (oparms
.length () + 1 == otypes
.length ());
3236 gcc_assert (oparms
.length () == otypes
.length ());
3240 last_parm_void
= false;
3244 for (i
= 0; i
< len
; i
++)
3246 struct ipa_parm_adjustment
*adj
;
3249 adj
= &adjustments
[i
];
3250 parm
= oparms
[adj
->base_index
];
3253 if (adj
->copy_param
)
3256 new_arg_types
= tree_cons (NULL_TREE
, otypes
[adj
->base_index
],
3259 link
= &DECL_CHAIN (parm
);
3261 else if (!adj
->remove_param
)
3267 ptype
= build_pointer_type (adj
->type
);
3272 new_arg_types
= tree_cons (NULL_TREE
, ptype
, new_arg_types
);
3274 new_parm
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
, NULL_TREE
,
3276 DECL_NAME (new_parm
) = create_tmp_var_name (synth_parm_prefix
);
3278 DECL_ARTIFICIAL (new_parm
) = 1;
3279 DECL_ARG_TYPE (new_parm
) = ptype
;
3280 DECL_CONTEXT (new_parm
) = fndecl
;
3281 TREE_USED (new_parm
) = 1;
3282 DECL_IGNORED_P (new_parm
) = 1;
3283 layout_decl (new_parm
, 0);
3286 adj
->reduction
= new_parm
;
3290 link
= &DECL_CHAIN (new_parm
);
3298 new_reversed
= nreverse (new_arg_types
);
3302 TREE_CHAIN (new_arg_types
) = void_list_node
;
3304 new_reversed
= void_list_node
;
3308 /* Use copy_node to preserve as much as possible from original type
3309 (debug info, attribute lists etc.)
3310 Exception is METHOD_TYPEs must have THIS argument.
3311 When we are asked to remove it, we need to build new FUNCTION_TYPE
3313 if (TREE_CODE (orig_type
) != METHOD_TYPE
3314 || (adjustments
[0].copy_param
3315 && adjustments
[0].base_index
== 0))
3317 new_type
= build_distinct_type_copy (orig_type
);
3318 TYPE_ARG_TYPES (new_type
) = new_reversed
;
3323 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type
),
3325 TYPE_CONTEXT (new_type
) = TYPE_CONTEXT (orig_type
);
3326 DECL_VINDEX (fndecl
) = NULL_TREE
;
3329 /* When signature changes, we need to clear builtin info. */
3330 if (DECL_BUILT_IN (fndecl
))
3332 DECL_BUILT_IN_CLASS (fndecl
) = NOT_BUILT_IN
;
3333 DECL_FUNCTION_CODE (fndecl
) = (enum built_in_function
) 0;
3336 /* This is a new type, not a copy of an old type. Need to reassociate
3337 variants. We can handle everything except the main variant lazily. */
3338 t
= TYPE_MAIN_VARIANT (orig_type
);
3341 TYPE_MAIN_VARIANT (new_type
) = t
;
3342 TYPE_NEXT_VARIANT (new_type
) = TYPE_NEXT_VARIANT (t
);
3343 TYPE_NEXT_VARIANT (t
) = new_type
;
3347 TYPE_MAIN_VARIANT (new_type
) = new_type
;
3348 TYPE_NEXT_VARIANT (new_type
) = NULL
;
3351 TREE_TYPE (fndecl
) = new_type
;
3352 DECL_VIRTUAL_P (fndecl
) = 0;
3357 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3358 If this is a directly recursive call, CS must be NULL. Otherwise it must
3359 contain the corresponding call graph edge. */
3362 ipa_modify_call_arguments (struct cgraph_edge
*cs
, gimple stmt
,
3363 ipa_parm_adjustment_vec adjustments
)
3365 struct cgraph_node
*current_node
= cgraph_get_node (current_function_decl
);
3367 vec
<tree
, va_gc
> **debug_args
= NULL
;
3369 gimple_stmt_iterator gsi
, prev_gsi
;
3373 len
= adjustments
.length ();
3375 callee_decl
= !cs
? gimple_call_fndecl (stmt
) : cs
->callee
->symbol
.decl
;
3376 ipa_remove_stmt_references ((symtab_node
) current_node
, stmt
);
3378 gsi
= gsi_for_stmt (stmt
);
3380 gsi_prev (&prev_gsi
);
3381 for (i
= 0; i
< len
; i
++)
3383 struct ipa_parm_adjustment
*adj
;
3385 adj
= &adjustments
[i
];
3387 if (adj
->copy_param
)
3389 tree arg
= gimple_call_arg (stmt
, adj
->base_index
);
3391 vargs
.quick_push (arg
);
3393 else if (!adj
->remove_param
)
3395 tree expr
, base
, off
;
3397 unsigned int deref_align
;
3398 bool deref_base
= false;
3400 /* We create a new parameter out of the value of the old one, we can
3401 do the following kind of transformations:
3403 - A scalar passed by reference is converted to a scalar passed by
3404 value. (adj->by_ref is false and the type of the original
3405 actual argument is a pointer to a scalar).
3407 - A part of an aggregate is passed instead of the whole aggregate.
3408 The part can be passed either by value or by reference, this is
3409 determined by value of adj->by_ref. Moreover, the code below
3410 handles both situations when the original aggregate is passed by
3411 value (its type is not a pointer) and when it is passed by
3412 reference (it is a pointer to an aggregate).
3414 When the new argument is passed by reference (adj->by_ref is true)
3415 it must be a part of an aggregate and therefore we form it by
3416 simply taking the address of a reference inside the original
3419 gcc_checking_assert (adj
->offset
% BITS_PER_UNIT
== 0);
3420 base
= gimple_call_arg (stmt
, adj
->base_index
);
3421 loc
= DECL_P (base
) ? DECL_SOURCE_LOCATION (base
)
3422 : EXPR_LOCATION (base
);
3424 if (TREE_CODE (base
) != ADDR_EXPR
3425 && POINTER_TYPE_P (TREE_TYPE (base
)))
3426 off
= build_int_cst (adj
->alias_ptr_type
,
3427 adj
->offset
/ BITS_PER_UNIT
);
3430 HOST_WIDE_INT base_offset
;
3434 if (TREE_CODE (base
) == ADDR_EXPR
)
3436 base
= TREE_OPERAND (base
, 0);
3442 base
= get_addr_base_and_unit_offset (base
, &base_offset
);
3443 /* Aggregate arguments can have non-invariant addresses. */
3446 base
= build_fold_addr_expr (prev_base
);
3447 off
= build_int_cst (adj
->alias_ptr_type
,
3448 adj
->offset
/ BITS_PER_UNIT
);
3450 else if (TREE_CODE (base
) == MEM_REF
)
3455 deref_align
= TYPE_ALIGN (TREE_TYPE (base
));
3457 off
= build_int_cst (adj
->alias_ptr_type
,
3459 + adj
->offset
/ BITS_PER_UNIT
);
3460 off
= int_const_binop (PLUS_EXPR
, TREE_OPERAND (base
, 1),
3462 base
= TREE_OPERAND (base
, 0);
3466 off
= build_int_cst (adj
->alias_ptr_type
,
3468 + adj
->offset
/ BITS_PER_UNIT
);
3469 base
= build_fold_addr_expr (base
);
3475 tree type
= adj
->type
;
3477 unsigned HOST_WIDE_INT misalign
;
3481 align
= deref_align
;
3486 get_pointer_alignment_1 (base
, &align
, &misalign
);
3487 if (TYPE_ALIGN (type
) > align
)
3488 align
= TYPE_ALIGN (type
);
3490 misalign
+= (tree_to_double_int (off
)
3491 .sext (TYPE_PRECISION (TREE_TYPE (off
))).low
3493 misalign
= misalign
& (align
- 1);
3495 align
= (misalign
& -misalign
);
3496 if (align
< TYPE_ALIGN (type
))
3497 type
= build_aligned_type (type
, align
);
3498 expr
= fold_build2_loc (loc
, MEM_REF
, type
, base
, off
);
3502 expr
= fold_build2_loc (loc
, MEM_REF
, adj
->type
, base
, off
);
3503 expr
= build_fold_addr_expr (expr
);
3506 expr
= force_gimple_operand_gsi (&gsi
, expr
,
3508 || is_gimple_reg_type (adj
->type
),
3509 NULL
, true, GSI_SAME_STMT
);
3510 vargs
.quick_push (expr
);
3512 if (!adj
->copy_param
&& MAY_HAVE_DEBUG_STMTS
)
3515 tree ddecl
= NULL_TREE
, origin
= DECL_ORIGIN (adj
->base
), arg
;
3518 arg
= gimple_call_arg (stmt
, adj
->base_index
);
3519 if (!useless_type_conversion_p (TREE_TYPE (origin
), TREE_TYPE (arg
)))
3521 if (!fold_convertible_p (TREE_TYPE (origin
), arg
))
3523 arg
= fold_convert_loc (gimple_location (stmt
),
3524 TREE_TYPE (origin
), arg
);
3526 if (debug_args
== NULL
)
3527 debug_args
= decl_debug_args_insert (callee_decl
);
3528 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
); ix
+= 2)
3529 if (ddecl
== origin
)
3531 ddecl
= (**debug_args
)[ix
+ 1];
3536 ddecl
= make_node (DEBUG_EXPR_DECL
);
3537 DECL_ARTIFICIAL (ddecl
) = 1;
3538 TREE_TYPE (ddecl
) = TREE_TYPE (origin
);
3539 DECL_MODE (ddecl
) = DECL_MODE (origin
);
3541 vec_safe_push (*debug_args
, origin
);
3542 vec_safe_push (*debug_args
, ddecl
);
3544 def_temp
= gimple_build_debug_bind (ddecl
, unshare_expr (arg
), stmt
);
3545 gsi_insert_before (&gsi
, def_temp
, GSI_SAME_STMT
);
3549 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3551 fprintf (dump_file
, "replacing stmt:");
3552 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
3555 new_stmt
= gimple_build_call_vec (callee_decl
, vargs
);
3557 if (gimple_call_lhs (stmt
))
3558 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3560 gimple_set_block (new_stmt
, gimple_block (stmt
));
3561 if (gimple_has_location (stmt
))
3562 gimple_set_location (new_stmt
, gimple_location (stmt
));
3563 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3564 gimple_call_copy_flags (new_stmt
, stmt
);
3566 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3568 fprintf (dump_file
, "with stmt:");
3569 print_gimple_stmt (dump_file
, new_stmt
, 0, 0);
3570 fprintf (dump_file
, "\n");
3572 gsi_replace (&gsi
, new_stmt
, true);
3574 cgraph_set_call_stmt (cs
, new_stmt
);
3577 ipa_record_stmt_references (current_node
, gsi_stmt (gsi
));
3580 while ((gsi_end_p (prev_gsi
) && !gsi_end_p (gsi
))
3581 || (!gsi_end_p (prev_gsi
) && gsi_stmt (gsi
) == gsi_stmt (prev_gsi
)));
3583 update_ssa (TODO_update_ssa
);
3584 free_dominance_info (CDI_DOMINATORS
);
3587 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3590 index_in_adjustments_multiple_times_p (int base_index
,
3591 ipa_parm_adjustment_vec adjustments
)
3593 int i
, len
= adjustments
.length ();
3596 for (i
= 0; i
< len
; i
++)
3598 struct ipa_parm_adjustment
*adj
;
3599 adj
= &adjustments
[i
];
3601 if (adj
->base_index
== base_index
)
3613 /* Return adjustments that should have the same effect on function parameters
3614 and call arguments as if they were first changed according to adjustments in
3615 INNER and then by adjustments in OUTER. */
3617 ipa_parm_adjustment_vec
3618 ipa_combine_adjustments (ipa_parm_adjustment_vec inner
,
3619 ipa_parm_adjustment_vec outer
)
3621 int i
, outlen
= outer
.length ();
3622 int inlen
= inner
.length ();
3624 ipa_parm_adjustment_vec adjustments
, tmp
;
3627 for (i
= 0; i
< inlen
; i
++)
3629 struct ipa_parm_adjustment
*n
;
3632 if (n
->remove_param
)
3635 tmp
.quick_push (*n
);
3638 adjustments
.create (outlen
+ removals
);
3639 for (i
= 0; i
< outlen
; i
++)
3641 struct ipa_parm_adjustment r
;
3642 struct ipa_parm_adjustment
*out
= &outer
[i
];
3643 struct ipa_parm_adjustment
*in
= &tmp
[out
->base_index
];
3645 memset (&r
, 0, sizeof (r
));
3646 gcc_assert (!in
->remove_param
);
3647 if (out
->remove_param
)
3649 if (!index_in_adjustments_multiple_times_p (in
->base_index
, tmp
))
3651 r
.remove_param
= true;
3652 adjustments
.quick_push (r
);
3657 r
.base_index
= in
->base_index
;
3660 /* FIXME: Create nonlocal value too. */
3662 if (in
->copy_param
&& out
->copy_param
)
3663 r
.copy_param
= true;
3664 else if (in
->copy_param
)
3665 r
.offset
= out
->offset
;
3666 else if (out
->copy_param
)
3667 r
.offset
= in
->offset
;
3669 r
.offset
= in
->offset
+ out
->offset
;
3670 adjustments
.quick_push (r
);
3673 for (i
= 0; i
< inlen
; i
++)
3675 struct ipa_parm_adjustment
*n
= &inner
[i
];
3677 if (n
->remove_param
)
3678 adjustments
.quick_push (*n
);
3685 /* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
3686 friendly way, assuming they are meant to be applied to FNDECL. */
3689 ipa_dump_param_adjustments (FILE *file
, ipa_parm_adjustment_vec adjustments
,
3692 int i
, len
= adjustments
.length ();
3694 vec
<tree
> parms
= ipa_get_vector_of_formal_parms (fndecl
);
3696 fprintf (file
, "IPA param adjustments: ");
3697 for (i
= 0; i
< len
; i
++)
3699 struct ipa_parm_adjustment
*adj
;
3700 adj
= &adjustments
[i
];
3703 fprintf (file
, " ");
3707 fprintf (file
, "%i. base_index: %i - ", i
, adj
->base_index
);
3708 print_generic_expr (file
, parms
[adj
->base_index
], 0);
3711 fprintf (file
, ", base: ");
3712 print_generic_expr (file
, adj
->base
, 0);
3716 fprintf (file
, ", reduction: ");
3717 print_generic_expr (file
, adj
->reduction
, 0);
3719 if (adj
->new_ssa_base
)
3721 fprintf (file
, ", new_ssa_base: ");
3722 print_generic_expr (file
, adj
->new_ssa_base
, 0);
3725 if (adj
->copy_param
)
3726 fprintf (file
, ", copy_param");
3727 else if (adj
->remove_param
)
3728 fprintf (file
, ", remove_param");
3730 fprintf (file
, ", offset %li", (long) adj
->offset
);
3732 fprintf (file
, ", by_ref");
3733 print_node_brief (file
, ", type: ", adj
->type
, 0);
3734 fprintf (file
, "\n");
3739 /* Dump the AV linked list. */
3742 ipa_dump_agg_replacement_values (FILE *f
, struct ipa_agg_replacement_value
*av
)
3745 fprintf (f
, " Aggregate replacements:");
3746 for (; av
; av
= av
->next
)
3748 fprintf (f
, "%s %i[" HOST_WIDE_INT_PRINT_DEC
"]=", comma
? "," : "",
3749 av
->index
, av
->offset
);
3750 print_generic_expr (f
, av
->value
, 0);
3756 /* Stream out jump function JUMP_FUNC to OB. */
3759 ipa_write_jump_function (struct output_block
*ob
,
3760 struct ipa_jump_func
*jump_func
)
3762 struct ipa_agg_jf_item
*item
;
3763 struct bitpack_d bp
;
3766 streamer_write_uhwi (ob
, jump_func
->type
);
3767 switch (jump_func
->type
)
3769 case IPA_JF_UNKNOWN
:
3771 case IPA_JF_KNOWN_TYPE
:
3772 streamer_write_uhwi (ob
, jump_func
->value
.known_type
.offset
);
3773 stream_write_tree (ob
, jump_func
->value
.known_type
.base_type
, true);
3774 stream_write_tree (ob
, jump_func
->value
.known_type
.component_type
, true);
3778 EXPR_LOCATION (jump_func
->value
.constant
.value
) == UNKNOWN_LOCATION
);
3779 stream_write_tree (ob
, jump_func
->value
.constant
.value
, true);
3781 case IPA_JF_PASS_THROUGH
:
3782 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.operation
);
3783 if (jump_func
->value
.pass_through
.operation
== NOP_EXPR
)
3785 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
3786 bp
= bitpack_create (ob
->main_stream
);
3787 bp_pack_value (&bp
, jump_func
->value
.pass_through
.agg_preserved
, 1);
3788 bp_pack_value (&bp
, jump_func
->value
.pass_through
.type_preserved
, 1);
3789 streamer_write_bitpack (&bp
);
3793 stream_write_tree (ob
, jump_func
->value
.pass_through
.operand
, true);
3794 streamer_write_uhwi (ob
, jump_func
->value
.pass_through
.formal_id
);
3797 case IPA_JF_ANCESTOR
:
3798 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.offset
);
3799 stream_write_tree (ob
, jump_func
->value
.ancestor
.type
, true);
3800 streamer_write_uhwi (ob
, jump_func
->value
.ancestor
.formal_id
);
3801 bp
= bitpack_create (ob
->main_stream
);
3802 bp_pack_value (&bp
, jump_func
->value
.ancestor
.agg_preserved
, 1);
3803 bp_pack_value (&bp
, jump_func
->value
.ancestor
.type_preserved
, 1);
3804 streamer_write_bitpack (&bp
);
3808 count
= vec_safe_length (jump_func
->agg
.items
);
3809 streamer_write_uhwi (ob
, count
);
3812 bp
= bitpack_create (ob
->main_stream
);
3813 bp_pack_value (&bp
, jump_func
->agg
.by_ref
, 1);
3814 streamer_write_bitpack (&bp
);
3817 FOR_EACH_VEC_SAFE_ELT (jump_func
->agg
.items
, i
, item
)
3819 streamer_write_uhwi (ob
, item
->offset
);
3820 stream_write_tree (ob
, item
->value
, true);
3824 /* Read in jump function JUMP_FUNC from IB. */
3827 ipa_read_jump_function (struct lto_input_block
*ib
,
3828 struct ipa_jump_func
*jump_func
,
3829 struct cgraph_edge
*cs
,
3830 struct data_in
*data_in
)
3832 enum jump_func_type jftype
;
3833 enum tree_code operation
;
3836 jftype
= (enum jump_func_type
) streamer_read_uhwi (ib
);
3839 case IPA_JF_UNKNOWN
:
3840 jump_func
->type
= IPA_JF_UNKNOWN
;
3842 case IPA_JF_KNOWN_TYPE
:
3844 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
3845 tree base_type
= stream_read_tree (ib
, data_in
);
3846 tree component_type
= stream_read_tree (ib
, data_in
);
3848 ipa_set_jf_known_type (jump_func
, offset
, base_type
, component_type
);
3852 ipa_set_jf_constant (jump_func
, stream_read_tree (ib
, data_in
), cs
);
3854 case IPA_JF_PASS_THROUGH
:
3855 operation
= (enum tree_code
) streamer_read_uhwi (ib
);
3856 if (operation
== NOP_EXPR
)
3858 int formal_id
= streamer_read_uhwi (ib
);
3859 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3860 bool agg_preserved
= bp_unpack_value (&bp
, 1);
3861 bool type_preserved
= bp_unpack_value (&bp
, 1);
3862 ipa_set_jf_simple_pass_through (jump_func
, formal_id
, agg_preserved
,
3867 tree operand
= stream_read_tree (ib
, data_in
);
3868 int formal_id
= streamer_read_uhwi (ib
);
3869 ipa_set_jf_arith_pass_through (jump_func
, formal_id
, operand
,
3873 case IPA_JF_ANCESTOR
:
3875 HOST_WIDE_INT offset
= streamer_read_uhwi (ib
);
3876 tree type
= stream_read_tree (ib
, data_in
);
3877 int formal_id
= streamer_read_uhwi (ib
);
3878 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3879 bool agg_preserved
= bp_unpack_value (&bp
, 1);
3880 bool type_preserved
= bp_unpack_value (&bp
, 1);
3882 ipa_set_ancestor_jf (jump_func
, offset
, type
, formal_id
, agg_preserved
,
3888 count
= streamer_read_uhwi (ib
);
3889 vec_alloc (jump_func
->agg
.items
, count
);
3892 struct bitpack_d bp
= streamer_read_bitpack (ib
);
3893 jump_func
->agg
.by_ref
= bp_unpack_value (&bp
, 1);
3895 for (i
= 0; i
< count
; i
++)
3897 struct ipa_agg_jf_item item
;
3898 item
.offset
= streamer_read_uhwi (ib
);
3899 item
.value
= stream_read_tree (ib
, data_in
);
3900 jump_func
->agg
.items
->quick_push (item
);
3904 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3905 relevant to indirect inlining to OB. */
3908 ipa_write_indirect_edge_info (struct output_block
*ob
,
3909 struct cgraph_edge
*cs
)
3911 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
3912 struct bitpack_d bp
;
3914 streamer_write_hwi (ob
, ii
->param_index
);
3915 streamer_write_hwi (ob
, ii
->offset
);
3916 bp
= bitpack_create (ob
->main_stream
);
3917 bp_pack_value (&bp
, ii
->polymorphic
, 1);
3918 bp_pack_value (&bp
, ii
->agg_contents
, 1);
3919 bp_pack_value (&bp
, ii
->member_ptr
, 1);
3920 bp_pack_value (&bp
, ii
->by_ref
, 1);
3921 streamer_write_bitpack (&bp
);
3923 if (ii
->polymorphic
)
3925 streamer_write_hwi (ob
, ii
->otr_token
);
3926 stream_write_tree (ob
, ii
->otr_type
, true);
3930 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3931 relevant to indirect inlining from IB. */
3934 ipa_read_indirect_edge_info (struct lto_input_block
*ib
,
3935 struct data_in
*data_in ATTRIBUTE_UNUSED
,
3936 struct cgraph_edge
*cs
)
3938 struct cgraph_indirect_call_info
*ii
= cs
->indirect_info
;
3939 struct bitpack_d bp
;
3941 ii
->param_index
= (int) streamer_read_hwi (ib
);
3942 ii
->offset
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
3943 bp
= streamer_read_bitpack (ib
);
3944 ii
->polymorphic
= bp_unpack_value (&bp
, 1);
3945 ii
->agg_contents
= bp_unpack_value (&bp
, 1);
3946 ii
->member_ptr
= bp_unpack_value (&bp
, 1);
3947 ii
->by_ref
= bp_unpack_value (&bp
, 1);
3948 if (ii
->polymorphic
)
3950 ii
->otr_token
= (HOST_WIDE_INT
) streamer_read_hwi (ib
);
3951 ii
->otr_type
= stream_read_tree (ib
, data_in
);
3955 /* Stream out NODE info to OB. */
3958 ipa_write_node_info (struct output_block
*ob
, struct cgraph_node
*node
)
3961 lto_symtab_encoder_t encoder
;
3962 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
3964 struct cgraph_edge
*e
;
3965 struct bitpack_d bp
;
3967 encoder
= ob
->decl_state
->symtab_node_encoder
;
3968 node_ref
= lto_symtab_encoder_encode (encoder
, (symtab_node
) node
);
3969 streamer_write_uhwi (ob
, node_ref
);
3971 streamer_write_uhwi (ob
, ipa_get_param_count (info
));
3972 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3973 streamer_write_uhwi (ob
, ipa_get_param_move_cost (info
, j
));
3974 bp
= bitpack_create (ob
->main_stream
);
3975 gcc_assert (info
->uses_analysis_done
3976 || ipa_get_param_count (info
) == 0);
3977 gcc_assert (!info
->node_enqueued
);
3978 gcc_assert (!info
->ipcp_orig_node
);
3979 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3980 bp_pack_value (&bp
, ipa_is_param_used (info
, j
), 1);
3981 streamer_write_bitpack (&bp
);
3982 for (j
= 0; j
< ipa_get_param_count (info
); j
++)
3983 streamer_write_hwi (ob
, ipa_get_controlled_uses (info
, j
));
3984 for (e
= node
->callees
; e
; e
= e
->next_callee
)
3986 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3988 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
3989 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
3990 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
3992 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
3994 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
3996 streamer_write_uhwi (ob
, ipa_get_cs_argument_count (args
));
3997 for (j
= 0; j
< ipa_get_cs_argument_count (args
); j
++)
3998 ipa_write_jump_function (ob
, ipa_get_ith_jump_func (args
, j
));
3999 ipa_write_indirect_edge_info (ob
, e
);
4003 /* Stream in NODE info from IB. */
4006 ipa_read_node_info (struct lto_input_block
*ib
, struct cgraph_node
*node
,
4007 struct data_in
*data_in
)
4009 struct ipa_node_params
*info
= IPA_NODE_REF (node
);
4011 struct cgraph_edge
*e
;
4012 struct bitpack_d bp
;
4014 ipa_alloc_node_params (node
, streamer_read_uhwi (ib
));
4016 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4017 info
->descriptors
[k
].move_cost
= streamer_read_uhwi (ib
);
4019 bp
= streamer_read_bitpack (ib
);
4020 if (ipa_get_param_count (info
) != 0)
4021 info
->uses_analysis_done
= true;
4022 info
->node_enqueued
= false;
4023 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4024 ipa_set_param_used (info
, k
, bp_unpack_value (&bp
, 1));
4025 for (k
= 0; k
< ipa_get_param_count (info
); k
++)
4026 ipa_set_controlled_uses (info
, k
, streamer_read_hwi (ib
));
4027 for (e
= node
->callees
; e
; e
= e
->next_callee
)
4029 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4030 int count
= streamer_read_uhwi (ib
);
4034 vec_safe_grow_cleared (args
->jump_functions
, count
);
4036 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4037 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4040 for (e
= node
->indirect_calls
; e
; e
= e
->next_callee
)
4042 struct ipa_edge_args
*args
= IPA_EDGE_REF (e
);
4043 int count
= streamer_read_uhwi (ib
);
4047 vec_safe_grow_cleared (args
->jump_functions
, count
);
4048 for (k
= 0; k
< ipa_get_cs_argument_count (args
); k
++)
4049 ipa_read_jump_function (ib
, ipa_get_ith_jump_func (args
, k
), e
,
4052 ipa_read_indirect_edge_info (ib
, data_in
, e
);
4056 /* Write jump functions for nodes in SET. */
4059 ipa_prop_write_jump_functions (void)
4061 struct cgraph_node
*node
;
4062 struct output_block
*ob
;
4063 unsigned int count
= 0;
4064 lto_symtab_encoder_iterator lsei
;
4065 lto_symtab_encoder_t encoder
;
4068 if (!ipa_node_params_vector
.exists ())
4071 ob
= create_output_block (LTO_section_jump_functions
);
4072 encoder
= ob
->decl_state
->symtab_node_encoder
;
4073 ob
->cgraph_node
= NULL
;
4074 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4075 lsei_next_function_in_partition (&lsei
))
4077 node
= lsei_cgraph_node (lsei
);
4078 if (cgraph_function_with_gimple_body_p (node
)
4079 && IPA_NODE_REF (node
) != NULL
)
4083 streamer_write_uhwi (ob
, count
);
4085 /* Process all of the functions. */
4086 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4087 lsei_next_function_in_partition (&lsei
))
4089 node
= lsei_cgraph_node (lsei
);
4090 if (cgraph_function_with_gimple_body_p (node
)
4091 && IPA_NODE_REF (node
) != NULL
)
4092 ipa_write_node_info (ob
, node
);
4094 streamer_write_char_stream (ob
->main_stream
, 0);
4095 produce_asm (ob
, NULL
);
4096 destroy_output_block (ob
);
4099 /* Read section in file FILE_DATA of length LEN with data DATA. */
4102 ipa_prop_read_section (struct lto_file_decl_data
*file_data
, const char *data
,
4105 const struct lto_function_header
*header
=
4106 (const struct lto_function_header
*) data
;
4107 const int cfg_offset
= sizeof (struct lto_function_header
);
4108 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4109 const int string_offset
= main_offset
+ header
->main_size
;
4110 struct data_in
*data_in
;
4111 struct lto_input_block ib_main
;
4115 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4119 lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4120 header
->string_size
, vNULL
);
4121 count
= streamer_read_uhwi (&ib_main
);
4123 for (i
= 0; i
< count
; i
++)
4126 struct cgraph_node
*node
;
4127 lto_symtab_encoder_t encoder
;
4129 index
= streamer_read_uhwi (&ib_main
);
4130 encoder
= file_data
->symtab_node_encoder
;
4131 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4132 gcc_assert (node
->symbol
.definition
);
4133 ipa_read_node_info (&ib_main
, node
, data_in
);
4135 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4137 lto_data_in_delete (data_in
);
4140 /* Read ipcp jump functions. */
4143 ipa_prop_read_jump_functions (void)
4145 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4146 struct lto_file_decl_data
*file_data
;
4149 ipa_check_create_node_params ();
4150 ipa_check_create_edge_args ();
4151 ipa_register_cgraph_hooks ();
4153 while ((file_data
= file_data_vec
[j
++]))
4156 const char *data
= lto_get_section_data (file_data
, LTO_section_jump_functions
, NULL
, &len
);
4159 ipa_prop_read_section (file_data
, data
, len
);
/* After merging units, we can get mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  /* (Re)create the per-node and per-edge IPA summaries if needed.  */
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
4175 write_agg_replacement_chain (struct output_block
*ob
, struct cgraph_node
*node
)
4178 unsigned int count
= 0;
4179 lto_symtab_encoder_t encoder
;
4180 struct ipa_agg_replacement_value
*aggvals
, *av
;
4182 aggvals
= ipa_get_agg_replacements_for_node (node
);
4183 encoder
= ob
->decl_state
->symtab_node_encoder
;
4184 node_ref
= lto_symtab_encoder_encode (encoder
, (symtab_node
) node
);
4185 streamer_write_uhwi (ob
, node_ref
);
4187 for (av
= aggvals
; av
; av
= av
->next
)
4189 streamer_write_uhwi (ob
, count
);
4191 for (av
= aggvals
; av
; av
= av
->next
)
4193 struct bitpack_d bp
;
4195 streamer_write_uhwi (ob
, av
->offset
);
4196 streamer_write_uhwi (ob
, av
->index
);
4197 stream_write_tree (ob
, av
->value
, true);
4199 bp
= bitpack_create (ob
->main_stream
);
4200 bp_pack_value (&bp
, av
->by_ref
, 1);
4201 streamer_write_bitpack (&bp
);
4205 /* Stream in the aggregate value replacement chain for NODE from IB. */
4208 read_agg_replacement_chain (struct lto_input_block
*ib
,
4209 struct cgraph_node
*node
,
4210 struct data_in
*data_in
)
4212 struct ipa_agg_replacement_value
*aggvals
= NULL
;
4213 unsigned int count
, i
;
4215 count
= streamer_read_uhwi (ib
);
4216 for (i
= 0; i
<count
; i
++)
4218 struct ipa_agg_replacement_value
*av
;
4219 struct bitpack_d bp
;
4221 av
= ggc_alloc_ipa_agg_replacement_value ();
4222 av
->offset
= streamer_read_uhwi (ib
);
4223 av
->index
= streamer_read_uhwi (ib
);
4224 av
->value
= stream_read_tree (ib
, data_in
);
4225 bp
= streamer_read_bitpack (ib
);
4226 av
->by_ref
= bp_unpack_value (&bp
, 1);
4230 ipa_set_node_agg_value_chain (node
, aggvals
);
4233 /* Write all aggregate replacement for nodes in set. */
4236 ipa_prop_write_all_agg_replacement (void)
4238 struct cgraph_node
*node
;
4239 struct output_block
*ob
;
4240 unsigned int count
= 0;
4241 lto_symtab_encoder_iterator lsei
;
4242 lto_symtab_encoder_t encoder
;
4244 if (!ipa_node_agg_replacements
)
4247 ob
= create_output_block (LTO_section_ipcp_transform
);
4248 encoder
= ob
->decl_state
->symtab_node_encoder
;
4249 ob
->cgraph_node
= NULL
;
4250 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4251 lsei_next_function_in_partition (&lsei
))
4253 node
= lsei_cgraph_node (lsei
);
4254 if (cgraph_function_with_gimple_body_p (node
)
4255 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4259 streamer_write_uhwi (ob
, count
);
4261 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
4262 lsei_next_function_in_partition (&lsei
))
4264 node
= lsei_cgraph_node (lsei
);
4265 if (cgraph_function_with_gimple_body_p (node
)
4266 && ipa_get_agg_replacements_for_node (node
) != NULL
)
4267 write_agg_replacement_chain (ob
, node
);
4269 streamer_write_char_stream (ob
->main_stream
, 0);
4270 produce_asm (ob
, NULL
);
4271 destroy_output_block (ob
);
4274 /* Read replacements section in file FILE_DATA of length LEN with data
4278 read_replacements_section (struct lto_file_decl_data
*file_data
,
4282 const struct lto_function_header
*header
=
4283 (const struct lto_function_header
*) data
;
4284 const int cfg_offset
= sizeof (struct lto_function_header
);
4285 const int main_offset
= cfg_offset
+ header
->cfg_size
;
4286 const int string_offset
= main_offset
+ header
->main_size
;
4287 struct data_in
*data_in
;
4288 struct lto_input_block ib_main
;
4292 LTO_INIT_INPUT_BLOCK (ib_main
, (const char *) data
+ main_offset
, 0,
4295 data_in
= lto_data_in_create (file_data
, (const char *) data
+ string_offset
,
4296 header
->string_size
, vNULL
);
4297 count
= streamer_read_uhwi (&ib_main
);
4299 for (i
= 0; i
< count
; i
++)
4302 struct cgraph_node
*node
;
4303 lto_symtab_encoder_t encoder
;
4305 index
= streamer_read_uhwi (&ib_main
);
4306 encoder
= file_data
->symtab_node_encoder
;
4307 node
= cgraph (lto_symtab_encoder_deref (encoder
, index
));
4308 gcc_assert (node
->symbol
.definition
);
4309 read_agg_replacement_chain (&ib_main
, node
, data_in
);
4311 lto_free_section_data (file_data
, LTO_section_jump_functions
, NULL
, data
,
4313 lto_data_in_delete (data_in
);
4316 /* Read IPA-CP aggregate replacements. */
4319 ipa_prop_read_all_agg_replacement (void)
4321 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
4322 struct lto_file_decl_data
*file_data
;
4325 while ((file_data
= file_data_vec
[j
++]))
4328 const char *data
= lto_get_section_data (file_data
,
4329 LTO_section_ipcp_transform
,
4332 read_replacements_section (file_data
, data
, len
);
4336 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4340 adjust_agg_replacement_values (struct cgraph_node
*node
,
4341 struct ipa_agg_replacement_value
*aggval
)
4343 struct ipa_agg_replacement_value
*v
;
4344 int i
, c
= 0, d
= 0, *adj
;
4346 if (!node
->clone
.combined_args_to_skip
)
4349 for (v
= aggval
; v
; v
= v
->next
)
4351 gcc_assert (v
->index
>= 0);
4357 adj
= XALLOCAVEC (int, c
);
4358 for (i
= 0; i
< c
; i
++)
4359 if (bitmap_bit_p (node
->clone
.combined_args_to_skip
, i
))
4367 for (v
= aggval
; v
; v
= v
->next
)
4368 v
->index
= adj
[v
->index
];
4372 /* Function body transformation phase. */
4375 ipcp_transform_function (struct cgraph_node
*node
)
4377 vec
<ipa_param_descriptor_t
> descriptors
= vNULL
;
4378 struct param_analysis_info
*parms_ainfo
;
4379 struct ipa_agg_replacement_value
*aggval
;
4380 gimple_stmt_iterator gsi
;
4383 bool cfg_changed
= false, something_changed
= false;
4385 gcc_checking_assert (cfun
);
4386 gcc_checking_assert (current_function_decl
);
4389 fprintf (dump_file
, "Modification phase of node %s/%i\n",
4390 cgraph_node_name (node
), node
->symbol
.order
);
4392 aggval
= ipa_get_agg_replacements_for_node (node
);
4395 param_count
= count_formal_params (node
->symbol
.decl
);
4396 if (param_count
== 0)
4398 adjust_agg_replacement_values (node
, aggval
);
4400 ipa_dump_agg_replacement_values (dump_file
, aggval
);
4401 parms_ainfo
= XALLOCAVEC (struct param_analysis_info
, param_count
);
4402 memset (parms_ainfo
, 0, sizeof (struct param_analysis_info
) * param_count
);
4403 descriptors
.safe_grow_cleared (param_count
);
4404 ipa_populate_param_decls (node
, descriptors
);
4407 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4409 struct ipa_agg_replacement_value
*v
;
4410 gimple stmt
= gsi_stmt (gsi
);
4412 HOST_WIDE_INT offset
;
4416 if (!gimple_assign_load_p (stmt
))
4418 rhs
= gimple_assign_rhs1 (stmt
);
4419 if (!is_gimple_reg_type (TREE_TYPE (rhs
)))
4424 while (handled_component_p (t
))
4426 /* V_C_E can do things like convert an array of integers to one
4427 bigger integer and similar things we do not handle below. */
4428 if (TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
)
4433 t
= TREE_OPERAND (t
, 0);
4438 if (!ipa_load_from_parm_agg_1 (descriptors
, parms_ainfo
, stmt
,
4439 rhs
, &index
, &offset
, &by_ref
))
4441 for (v
= aggval
; v
; v
= v
->next
)
4442 if (v
->index
== index
4443 && v
->offset
== offset
)
4445 if (!v
|| v
->by_ref
!= by_ref
)
4448 gcc_checking_assert (is_gimple_ip_invariant (v
->value
));
4449 if (!useless_type_conversion_p (TREE_TYPE (rhs
), TREE_TYPE (v
->value
)))
4451 if (fold_convertible_p (TREE_TYPE (rhs
), v
->value
))
4452 val
= fold_build1 (NOP_EXPR
, TREE_TYPE (rhs
), v
->value
);
4453 else if (TYPE_SIZE (TREE_TYPE (rhs
))
4454 == TYPE_SIZE (TREE_TYPE (v
->value
)))
4455 val
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (rhs
), v
->value
);
4460 fprintf (dump_file
, " const ");
4461 print_generic_expr (dump_file
, v
->value
, 0);
4462 fprintf (dump_file
, " can't be converted to type of ");
4463 print_generic_expr (dump_file
, rhs
, 0);
4464 fprintf (dump_file
, "\n");
4472 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4474 fprintf (dump_file
, "Modifying stmt:\n ");
4475 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4477 gimple_assign_set_rhs_from_tree (&gsi
, val
);
4480 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4482 fprintf (dump_file
, "into:\n ");
4483 print_gimple_stmt (dump_file
, stmt
, 0, 0);
4484 fprintf (dump_file
, "\n");
4487 something_changed
= true;
4488 if (maybe_clean_eh_stmt (stmt
)
4489 && gimple_purge_dead_eh_edges (gimple_bb (stmt
)))
4493 (*ipa_node_agg_replacements
)[node
->uid
] = NULL
;
4494 free_parms_ainfo (parms_ainfo
, param_count
);
4495 descriptors
.release ();
4497 if (!something_changed
)
4499 else if (cfg_changed
)
4500 return TODO_update_ssa_only_virtuals
| TODO_cleanup_cfg
;
4502 return TODO_update_ssa_only_virtuals
;