/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
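/* For instance (purely illustrative, not from the original sources), a
   function declared as

     void __attribute__ ((optimize ("O0"))) f (void);

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node with x_optimize == 0,
   so the predicate above returns true and IPA-CP leaves it alone.  */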
/* Return the index of the formal whose tree is PTREE in the function whose
   formal parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function associated with INFO to FILE,
   printing its declaration if it is known.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
	ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, " indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
	      && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
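/* As a purely illustrative example, the kind of statement the predicate
   below is designed to catch is a vtable pointer store such as

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment whose LHS is a COMPONENT_REF of a DECL_VIRTUAL_P
   field.  */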
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If so, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describe the parameters of the current function and FBI carries
   the body analysis context (but the latter can be NULL), STMT is the load
   statement.  If the function returns true, *INDEX_P, *OFFSET_P and *BY_REF_P
   are filled with the parameter index, offset within the aggregate and whether
   it is a load from a value passed by reference, respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 }
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, only without the func_body_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as they are accessed
   in different stages of IPA optimizations.  FBI contains the information that
   is only needed during intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */
static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
	  || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
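/* Illustration (hypothetical layout, not from the original sources): a C++
   pointer to member function is represented roughly as

     struct { void (A::*__pfn) (); long __delta; };

   i.e. a RECORD_TYPE with a method-pointer field followed by a single
   integral delta field, which is the shape this predicate checks for.  */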
static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
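/* For example (illustrative SSA names only): in a copy chain like

     b_2 = 5;
     c_3 = b_2;

   calling get_ssa_def_if_simple_copy on c_3 follows the single-operand
   assignments and returns the constant 5.  */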
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
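/* Illustrative behavior of the lookup above, assuming a list already
   describing bit ranges [0, 32) and [64, 96):

     - a new entry for [32, 64) yields the place between the two nodes,
     - a repeated [0, 32) sets *ALREADY_THERE to true,
     - an overlapping [16, 48) makes the function return NULL.  */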
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
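/* As a purely illustrative example, for a call sequence like

     s.f = 16;
     s.g = 64;
     foo (&s);

   the backward walk below records that the parts of the aggregate at the
   offsets of f and g hold the constants 16 and 64, allowing IPA-CP to later
   propagate these aggregate contents into foo.  */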
1682 static void
1683 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1684 struct ipa_jump_func *jfunc)
1686 struct ipa_known_agg_contents_list *list = NULL;
1687 int item_count = 0, const_count = 0;
1688 HOST_WIDE_INT arg_offset, arg_size;
1689 gimple_stmt_iterator gsi;
1690 tree arg_base;
1691 bool check_ref, by_ref;
1692 ao_ref r;
1694 /* The function operates in three stages. First, we prepare check_ref, r,
1695 arg_base and arg_offset based on what is actually passed as an actual
1696 argument. */
1698 if (POINTER_TYPE_P (arg_type))
1700 by_ref = true;
1701 if (TREE_CODE (arg) == SSA_NAME)
1703 tree type_size;
1704 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1705 return;
1706 check_ref = true;
1707 arg_base = arg;
1708 arg_offset = 0;
1709 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1710 arg_size = tree_to_uhwi (type_size);
1711 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1713 else if (TREE_CODE (arg) == ADDR_EXPR)
1715 HOST_WIDE_INT arg_max_size;
1717 arg = TREE_OPERAND (arg, 0);
1718 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1719 &arg_max_size);
1720 if (arg_max_size == -1
1721 || arg_max_size != arg_size
1722 || arg_offset < 0)
1723 return;
1724 if (DECL_P (arg_base))
1726 check_ref = false;
1727 ao_ref_init (&r, arg_base);
1729 else
1730 return;
1732 else
1733 return;
1735 else
1737 HOST_WIDE_INT arg_max_size;
1739 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1741 by_ref = false;
1742 check_ref = false;
1743 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1744 &arg_max_size);
1745 if (arg_max_size == -1
1746 || arg_max_size != arg_size
1747 || arg_offset < 0)
1748 return;
1750 ao_ref_init (&r, arg);
1753 /* Second stage walks back the BB, looks at individual statements and as long
1754 as it is confident of how the statements affect contents of the
1755 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1756 describing it. */
1757 gsi = gsi_for_stmt (call);
1758 gsi_prev (&gsi);
1759 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1761 struct ipa_known_agg_contents_list *n, **p;
1762 gimple stmt = gsi_stmt (gsi);
1763 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1764 tree lhs, rhs, lhs_base;
1766 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1767 continue;
1768 if (!gimple_assign_single_p (stmt))
1769 break;
1771 lhs = gimple_assign_lhs (stmt);
1772 rhs = gimple_assign_rhs1 (stmt);
1773 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1774 || TREE_CODE (lhs) == BIT_FIELD_REF
1775 || contains_bitfld_component_ref_p (lhs))
1776 break;
1778 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1779 &lhs_max_size);
1780 if (lhs_max_size == -1
1781 || lhs_max_size != lhs_size)
1782 break;
1784 if (check_ref)
1786 if (TREE_CODE (lhs_base) != MEM_REF
1787 || TREE_OPERAND (lhs_base, 0) != arg_base
1788 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1789 break;
1791 else if (lhs_base != arg_base)
1793 if (DECL_P (lhs_base))
1794 continue;
1795 else
1796 break;
1799 bool already_there = false;
1800 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1801 &already_there);
1802 if (!p)
1803 break;
1804 if (already_there)
1805 continue;
1807 rhs = get_ssa_def_if_simple_copy (rhs);
1808 n = XALLOCA (struct ipa_known_agg_contents_list);
1809 n->size = lhs_size;
1810 n->offset = lhs_offset;
1811 if (is_gimple_ip_invariant (rhs))
1813 n->constant = rhs;
1814 const_count++;
1816 else
1817 n->constant = NULL_TREE;
1818 n->next = *p;
1819 *p = n;
1821 item_count++;
1822 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1823 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1824 break;
1827 /* The third stage just goes over the list and creates an appropriate vector
1828 of ipa_agg_jf_item structures out of it, of course only if there are any
1829 known constants to begin with. */
1831 if (const_count)
1833 jfunc->agg.by_ref = by_ref;
1834 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
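/* An illustrative sketch (not from the sources; CONSUME and S are made-up
   names): the kind of caller the function above summarizes. Both stores
   dominate the call and store IP-invariant values, so each becomes an
   ipa_agg_jf_item keyed by its bit offset within S (offsets assume a
   target with 32-bit int):

     struct S { int a; int b; };
     void consume (struct S *p);

     void caller (void)
     {
       struct S s;
       s.a = 1;        // recorded: offset 0, size 32, constant 1
       s.b = 2;        // recorded: offset 32, size 32, constant 2
       consume (&s);   // jfunc->agg.by_ref == true, two constant items
     }

   A store of a non-constant value still occupies a list entry (so earlier
   statements cannot resurrect the location) but with a NULL constant, and
   the final vector is only built when const_count is nonzero. */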
1838 static tree
1839 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1841 int n;
1842 tree type = (e->callee
1843 ? TREE_TYPE (e->callee->decl)
1844 : gimple_call_fntype (e->call_stmt));
1845 tree t = TYPE_ARG_TYPES (type);
1847 for (n = 0; n < i; n++)
1849 if (!t)
1850 break;
1851 t = TREE_CHAIN (t);
1853 if (t)
1854 return TREE_VALUE (t);
1855 if (!e->callee)
1856 return NULL;
1857 t = DECL_ARGUMENTS (e->callee->decl);
1858 for (n = 0; n < i; n++)
1860 if (!t)
1861 return NULL;
1862 t = TREE_CHAIN (t);
1864 if (t)
1865 return TREE_TYPE (t);
1866 return NULL;
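/* An illustrative example (a sketch; LOG_MSG is a made-up declaration): for
   a variadic callee such as

     int log_msg (const char *fmt, ...);

   TYPE_ARG_TYPES only covers FMT, so for I >= 1 the loop above runs out of
   the chain, the DECL_ARGUMENTS fallback also ends, and NULL is returned;
   callers treat that as an unknown parameter type. */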
1869 /* Compute jump function for all arguments of callsite CS and insert the
1870 information in the jump_functions array in the ipa_edge_args corresponding
1871 to this callsite. */
1873 static void
1874 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1875 struct cgraph_edge *cs)
1877 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1878 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1879 gimple call = cs->call_stmt;
1880 int n, arg_num = gimple_call_num_args (call);
1881 bool useful_context = false;
1883 if (arg_num == 0 || args->jump_functions)
1884 return;
1885 vec_safe_grow_cleared (args->jump_functions, arg_num);
1886 if (flag_devirtualize)
1887 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1889 if (gimple_call_internal_p (call))
1890 return;
1891 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1892 return;
1894 for (n = 0; n < arg_num; n++)
1896 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1897 tree arg = gimple_call_arg (call, n);
1898 tree param_type = ipa_get_callee_param_type (cs, n);
1899 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1901 tree instance;
1902 struct ipa_polymorphic_call_context context (cs->caller->decl,
1903 arg, cs->call_stmt,
1904 &instance);
1905 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1906 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1907 if (!context.useless_p ())
1908 useful_context = true;
1911 if (is_gimple_ip_invariant (arg))
1912 ipa_set_jf_constant (jfunc, arg, cs);
1913 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1914 && TREE_CODE (arg) == PARM_DECL)
1916 int index = ipa_get_param_decl_index (info, arg);
1918 gcc_assert (index >= 0);
1919 /* Aggregate passed by value, check for pass-through, otherwise we
1920 will attempt to fill in aggregate contents later in this
1921 loop. */
1922 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1924 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1925 continue;
1928 else if (TREE_CODE (arg) == SSA_NAME)
1930 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1932 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1933 if (index >= 0)
1935 bool agg_p, type_p;
1936 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1937 if (param_type && POINTER_TYPE_P (param_type))
1938 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1939 call, jfunc);
1940 else
1941 type_p = false;
1942 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1943 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1944 type_p);
1947 else
1949 gimple stmt = SSA_NAME_DEF_STMT (arg);
1950 if (is_gimple_assign (stmt))
1951 compute_complex_assign_jump_func (fbi, info, jfunc,
1952 call, stmt, arg, param_type);
1953 else if (gimple_code (stmt) == GIMPLE_PHI)
1954 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1955 call, stmt, param_type);
1958 else
1959 compute_known_type_jump_func (arg, jfunc, call,
1960 param_type
1961 && POINTER_TYPE_P (param_type)
1962 ? TREE_TYPE (param_type)
1963 : NULL);
1965 /* If ARG is a pointer, we cannot use its type to determine the type of the
1966 aggregate passed (because type conversions are ignored in gimple). Usually
1967 we can safely get the type from the function declaration, but for K&R
1968 prototypes or variadic functions we can try our luck with the type of the
1969 pointer passed. TODO: Since we look for actual initialization of the memory
1970 object, we may be better off working out the type from the stores we find. */
1971 if (!param_type)
1972 param_type = TREE_TYPE (arg);
1974 if ((jfunc->type != IPA_JF_PASS_THROUGH
1975 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1976 && (jfunc->type != IPA_JF_ANCESTOR
1977 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1978 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1979 || POINTER_TYPE_P (param_type)))
1980 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1982 if (!useful_context)
1983 vec_free (args->polymorphic_call_contexts);
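/* An illustrative example (a sketch; BAR, I and GLOB are made-up names,
   with I a parameter of the caller and GLOB a global variable): for a call

     bar (i, i + 4, 7, &glob);

   the loop above would typically produce a simple pass-through jump
   function for the first argument, an arithmetic pass-through (PLUS_EXPR
   with operand 4, via compute_complex_assign_jump_func) for the second,
   and constant jump functions for 7 and &glob. */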
1986 /* Compute jump functions for all edges - both direct and indirect - outgoing
1987 from BB. */
1989 static void
1990 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1992 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1993 int i;
1994 struct cgraph_edge *cs;
1996 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1998 struct cgraph_node *callee = cs->callee;
2000 if (callee)
2002 callee->ultimate_alias_target ();
2003 /* We do not need to bother analyzing calls to unknown functions
2004 unless they may become known during lto/whopr. */
2005 if (!callee->definition && !flag_lto)
2006 continue;
2008 ipa_compute_jump_functions_for_edge (fbi, cs);
2012 /* If STMT looks like a statement loading a value from a member pointer formal
2013 parameter, return that parameter and store the offset of the field to
2014 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2015 might be clobbered). If USE_DELTA, then we look for a use of the delta
2016 field rather than the pfn. */
2018 static tree
2019 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2020 HOST_WIDE_INT *offset_p)
2022 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2024 if (!gimple_assign_single_p (stmt))
2025 return NULL_TREE;
2027 rhs = gimple_assign_rhs1 (stmt);
2028 if (TREE_CODE (rhs) == COMPONENT_REF)
2030 ref_field = TREE_OPERAND (rhs, 1);
2031 rhs = TREE_OPERAND (rhs, 0);
2033 else
2034 ref_field = NULL_TREE;
2035 if (TREE_CODE (rhs) != MEM_REF)
2036 return NULL_TREE;
2037 rec = TREE_OPERAND (rhs, 0);
2038 if (TREE_CODE (rec) != ADDR_EXPR)
2039 return NULL_TREE;
2040 rec = TREE_OPERAND (rec, 0);
2041 if (TREE_CODE (rec) != PARM_DECL
2042 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2043 return NULL_TREE;
2044 ref_offset = TREE_OPERAND (rhs, 1);
2046 if (use_delta)
2047 fld = delta_field;
2048 else
2049 fld = ptr_field;
2050 if (offset_p)
2051 *offset_p = int_bit_position (fld);
2053 if (ref_field)
2055 if (integer_nonzerop (ref_offset))
2056 return NULL_TREE;
2057 return ref_field == fld ? rec : NULL_TREE;
2059 else
2060 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2061 : NULL_TREE;
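/* An illustrative note (a sketch; the exact layout and encoding are
   target-dependent and are checked by type_like_member_ptr_p): a C++
   pointer to member function is lowered to a record with a pfn and a
   delta field, conceptually something like

     struct ptrmemfunc { void (*__pfn) (void); ptrdiff_t __delta; };

   so the loads matched above are COMPONENT_REFs such as f.__pfn, or the
   equivalent MEM_REF forms at the corresponding byte offset. */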
2064 /* Returns true iff T is an SSA_NAME defined by a statement. */
2066 static bool
2067 ipa_is_ssa_with_stmt_def (tree t)
2069 if (TREE_CODE (t) == SSA_NAME
2070 && !SSA_NAME_IS_DEFAULT_DEF (t))
2071 return true;
2072 else
2073 return false;
2076 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2077 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2078 indirect call graph edge. */
2080 static struct cgraph_edge *
2081 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2083 struct cgraph_edge *cs;
2085 cs = node->get_edge (stmt);
2086 cs->indirect_info->param_index = param_index;
2087 cs->indirect_info->agg_contents = 0;
2088 cs->indirect_info->member_ptr = 0;
2089 return cs;
2092 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2093 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2094 intermediate information about each formal parameter. Currently it checks
2095 whether the call calls a pointer that is a formal parameter and if so, the
2096 parameter is marked with the called flag and an indirect call graph edge
2097 describing the call is created. This is very simple for ordinary pointers
2098 represented in SSA but not-so-nice when it comes to member pointers. The
2099 ugly part of this function does nothing more than try to match the
2100 pattern of such a call. An example of such a pattern is the gimple dump
2101 below, the call is on the last line:
2103 <bb 2>:
2104 f$__delta_5 = f.__delta;
2105 f$__pfn_24 = f.__pfn;
2108 <bb 2>:
2109 f$__delta_5 = MEM[(struct *)&f];
2110 f$__pfn_24 = MEM[(struct *)&f + 4B];
2112 and a few lines below:
2114 <bb 5>
2115 D.2496_3 = (int) f$__pfn_24;
2116 D.2497_4 = D.2496_3 & 1;
2117 if (D.2497_4 != 0)
2118 goto <bb 3>;
2119 else
2120 goto <bb 4>;
2122 <bb 6>:
2123 D.2500_7 = (unsigned int) f$__delta_5;
2124 D.2501_8 = &S + D.2500_7;
2125 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2126 D.2503_10 = *D.2502_9;
2127 D.2504_12 = f$__pfn_24 + -1;
2128 D.2505_13 = (unsigned int) D.2504_12;
2129 D.2506_14 = D.2503_10 + D.2505_13;
2130 D.2507_15 = *D.2506_14;
2131 iftmp.11_16 = (String:: *) D.2507_15;
2133 <bb 7>:
2134 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2135 D.2500_19 = (unsigned int) f$__delta_5;
2136 D.2508_20 = &S + D.2500_19;
2137 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2139 Such patterns are results of simple calls to a member pointer:
2141 int doprinting (int (MyString::* f)(int) const)
2143 MyString S ("somestring");
2145 return (S.*f)(4);
2148 Moreover, the function also looks for called pointers loaded from aggregates
2149 passed by value or reference. */
2151 static void
2152 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2153 tree target)
2155 struct ipa_node_params *info = fbi->info;
2156 HOST_WIDE_INT offset;
2157 bool by_ref;
2159 if (SSA_NAME_IS_DEFAULT_DEF (target))
2161 tree var = SSA_NAME_VAR (target);
2162 int index = ipa_get_param_decl_index (info, var);
2163 if (index >= 0)
2164 ipa_note_param_call (fbi->node, index, call);
2165 return;
2168 int index;
2169 gimple def = SSA_NAME_DEF_STMT (target);
2170 if (gimple_assign_single_p (def)
2171 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2172 gimple_assign_rhs1 (def), &index, &offset,
2173 NULL, &by_ref))
2175 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2176 cs->indirect_info->offset = offset;
2177 cs->indirect_info->agg_contents = 1;
2178 cs->indirect_info->by_ref = by_ref;
2179 return;
2182 /* Now we need to try to match the complex pattern of calling a member
2183 pointer. */
2184 if (gimple_code (def) != GIMPLE_PHI
2185 || gimple_phi_num_args (def) != 2
2186 || !POINTER_TYPE_P (TREE_TYPE (target))
2187 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2188 return;
2190 /* First, we need to check whether one of these is a load from a member
2191 pointer that is a parameter to this function. */
2192 tree n1 = PHI_ARG_DEF (def, 0);
2193 tree n2 = PHI_ARG_DEF (def, 1);
2194 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2195 return;
2196 gimple d1 = SSA_NAME_DEF_STMT (n1);
2197 gimple d2 = SSA_NAME_DEF_STMT (n2);
2199 tree rec;
2200 basic_block bb, virt_bb;
2201 basic_block join = gimple_bb (def);
2202 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2204 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2205 return;
2207 bb = EDGE_PRED (join, 0)->src;
2208 virt_bb = gimple_bb (d2);
2210 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2212 bb = EDGE_PRED (join, 1)->src;
2213 virt_bb = gimple_bb (d1);
2215 else
2216 return;
2218 /* Second, we need to check that the basic blocks are laid out in the way
2219 corresponding to the pattern. */
2221 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2222 || single_pred (virt_bb) != bb
2223 || single_succ (virt_bb) != join)
2224 return;
2226 /* Third, let's see that the branching is done depending on the least
2227 significant bit of the pfn. */
2229 gimple branch = last_stmt (bb);
2230 if (!branch || gimple_code (branch) != GIMPLE_COND)
2231 return;
2233 if ((gimple_cond_code (branch) != NE_EXPR
2234 && gimple_cond_code (branch) != EQ_EXPR)
2235 || !integer_zerop (gimple_cond_rhs (branch)))
2236 return;
2238 tree cond = gimple_cond_lhs (branch);
2239 if (!ipa_is_ssa_with_stmt_def (cond))
2240 return;
2242 def = SSA_NAME_DEF_STMT (cond);
2243 if (!is_gimple_assign (def)
2244 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2245 || !integer_onep (gimple_assign_rhs2 (def)))
2246 return;
2248 cond = gimple_assign_rhs1 (def);
2249 if (!ipa_is_ssa_with_stmt_def (cond))
2250 return;
2252 def = SSA_NAME_DEF_STMT (cond);
2254 if (is_gimple_assign (def)
2255 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2257 cond = gimple_assign_rhs1 (def);
2258 if (!ipa_is_ssa_with_stmt_def (cond))
2259 return;
2260 def = SSA_NAME_DEF_STMT (cond);
2263 tree rec2;
2264 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2265 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2266 == ptrmemfunc_vbit_in_delta),
2267 NULL);
2268 if (rec != rec2)
2269 return;
2271 index = ipa_get_param_decl_index (info, rec);
2272 if (index >= 0
2273 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2275 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2276 cs->indirect_info->offset = offset;
2277 cs->indirect_info->agg_contents = 1;
2278 cs->indirect_info->member_ptr = 1;
2281 return;
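/* An illustrative example (a sketch; APPLY and CB are made-up names) of
   the simple SSA case handled at the top of the function above:

     void apply (void (*cb) (int))
     {
       cb (5);
     }

   CB is the default definition of a PARM_DECL, so ipa_note_param_call
   records its parameter index and later propagation can make the edge
   direct whenever a known function is passed for CB. */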
2284 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2285 object referenced in the expression is a formal parameter of the caller
2286 FBI->node (described by FBI->info), create a call note for the
2287 statement. */
2289 static void
2290 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2291 gimple call, tree target)
2293 tree obj = OBJ_TYPE_REF_OBJECT (target);
2294 int index;
2295 HOST_WIDE_INT anc_offset;
2297 if (!flag_devirtualize)
2298 return;
2300 if (TREE_CODE (obj) != SSA_NAME)
2301 return;
2303 struct ipa_node_params *info = fbi->info;
2304 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2306 struct ipa_jump_func jfunc;
2307 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2308 return;
2310 anc_offset = 0;
2311 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2314 call, &jfunc))
2315 return;
2317 else
2319 struct ipa_jump_func jfunc;
2320 gimple stmt = SSA_NAME_DEF_STMT (obj);
2321 tree expr;
2323 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2324 if (!expr)
2325 return;
2326 index = ipa_get_param_decl_index (info,
2327 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2328 gcc_assert (index >= 0);
2329 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2330 call, &jfunc, anc_offset))
2331 return;
2334 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2335 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2336 ii->offset = anc_offset;
2337 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2338 ii->otr_type = obj_type_ref_class (target);
2339 ii->polymorphic = 1;
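/* An illustrative example (a sketch; A and CALL_F are made-up names) of a
   call this function analyzes:

     struct A { virtual void f (); };
     void call_f (A *a) { a->f (); }

   The OBJ_TYPE_REF object is the default definition of parameter A, so,
   unless a type change is detected, an indirect call note is created with
   polymorphic == 1, otr_type A and the token of f. */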
2342 /* Analyze call statement CALL to determine whether and how it utilizes formal
2343 parameters of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2344 containing intermediate information about each formal parameter. */
2346 static void
2347 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2349 tree target = gimple_call_fn (call);
2351 if (!target
2352 || (TREE_CODE (target) != SSA_NAME
2353 && !virtual_method_call_p (target)))
2354 return;
2356 struct cgraph_edge *cs = fbi->node->get_edge (call);
2357 /* If we previously turned the call into a direct call, there is
2358 no need to analyze. */
2359 if (cs && !cs->indirect_unknown_callee)
2360 return;
2362 if (cs->indirect_info->polymorphic)
2364 tree instance;
2365 tree target = gimple_call_fn (call);
2366 ipa_polymorphic_call_context context (current_function_decl,
2367 target, call, &instance);
2369 gcc_checking_assert (cs->indirect_info->otr_type
2370 == obj_type_ref_class (target));
2371 gcc_checking_assert (cs->indirect_info->otr_token
2372 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2374 context.get_dynamic_type (instance,
2375 OBJ_TYPE_REF_OBJECT (target),
2376 obj_type_ref_class (target), call);
2377 cs->indirect_info->context = context;
2380 if (TREE_CODE (target) == SSA_NAME)
2381 ipa_analyze_indirect_call_uses (fbi, call, target);
2382 else if (virtual_method_call_p (target))
2383 ipa_analyze_virtual_call_uses (fbi, call, target);
2387 /* Analyze the call statement STMT with respect to formal parameters (described
2388 in INFO) of the caller given by FBI->NODE. Currently it only checks whether
2389 formal parameters are called. */
2391 static void
2392 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2394 if (is_gimple_call (stmt))
2395 ipa_analyze_call_uses (fbi, stmt);
2398 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2399 If OP is a parameter declaration, mark it as used in the info structure
2400 passed in DATA. */
2402 static bool
2403 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2405 struct ipa_node_params *info = (struct ipa_node_params *) data;
2407 op = get_base_address (op);
2408 if (op
2409 && TREE_CODE (op) == PARM_DECL)
2411 int index = ipa_get_param_decl_index (info, op);
2412 gcc_assert (index >= 0);
2413 ipa_set_param_used (info, index, true);
2416 return false;
2419 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2420 the findings in various structures of the associated ipa_node_params
2421 structure, such as parameter flags, notes etc. FBI holds various data about
2422 the function being analyzed. */
2424 static void
2425 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2427 gimple_stmt_iterator gsi;
2428 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2430 gimple stmt = gsi_stmt (gsi);
2432 if (is_gimple_debug (stmt))
2433 continue;
2435 ipa_analyze_stmt_uses (fbi, stmt);
2436 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2437 visit_ref_for_mod_analysis,
2438 visit_ref_for_mod_analysis,
2439 visit_ref_for_mod_analysis);
2441 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2442 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis);
2448 /* Calculate controlled uses of parameters of NODE. */
2450 static void
2451 ipa_analyze_controlled_uses (struct cgraph_node *node)
2453 struct ipa_node_params *info = IPA_NODE_REF (node);
2455 for (int i = 0; i < ipa_get_param_count (info); i++)
2457 tree parm = ipa_get_param (info, i);
2458 int controlled_uses = 0;
2460 /* For SSA regs see if parameter is used. For non-SSA we compute
2461 the flag during modification analysis. */
2462 if (is_gimple_reg (parm))
2464 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2465 parm);
2466 if (ddef && !has_zero_uses (ddef))
2468 imm_use_iterator imm_iter;
2469 use_operand_p use_p;
2471 ipa_set_param_used (info, i, true);
2472 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2473 if (!is_gimple_call (USE_STMT (use_p)))
2475 if (!is_gimple_debug (USE_STMT (use_p)))
2477 controlled_uses = IPA_UNDESCRIBED_USE;
2478 break;
2481 else
2482 controlled_uses++;
2484 else
2485 controlled_uses = 0;
2487 else
2488 controlled_uses = IPA_UNDESCRIBED_USE;
2489 ipa_set_controlled_uses (info, i, controlled_uses);
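/* An illustrative example (a sketch with simplified semantics; FN and
   FN_GLOBAL are made-up names):

     void a (void (*fn) (void)) { fn (); }          // controlled_uses == 1
     void b (void (*fn) (void)) { fn_global = fn; } // IPA_UNDESCRIBED_USE

   Every non-debug use of FN's default definition must appear in a call
   statement for the count to stay precise; any other use, such as the
   store in B, degrades it to IPA_UNDESCRIBED_USE. */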
2493 /* Free stuff in BI. */
2495 static void
2496 free_ipa_bb_info (struct ipa_bb_info *bi)
2498 bi->cg_edges.release ();
2499 bi->param_aa_statuses.release ();
2502 /* Dominator walker driving the analysis. */
2504 class analysis_dom_walker : public dom_walker
2506 public:
2507 analysis_dom_walker (struct func_body_info *fbi)
2508 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2510 virtual void before_dom_children (basic_block);
2512 private:
2513 struct func_body_info *m_fbi;
2516 void
2517 analysis_dom_walker::before_dom_children (basic_block bb)
2519 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2520 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2523 /* Initialize the array describing properties of formal parameters
2524 of NODE, analyze their uses and compute jump functions associated
2525 with actual arguments of calls from within NODE. */
2527 void
2528 ipa_analyze_node (struct cgraph_node *node)
2530 struct func_body_info fbi;
2531 struct ipa_node_params *info;
2533 ipa_check_create_node_params ();
2534 ipa_check_create_edge_args ();
2535 info = IPA_NODE_REF (node);
2537 if (info->analysis_done)
2538 return;
2539 info->analysis_done = 1;
2541 if (ipa_func_spec_opts_forbid_analysis_p (node))
2543 for (int i = 0; i < ipa_get_param_count (info); i++)
2545 ipa_set_param_used (info, i, true);
2546 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2548 return;
2551 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2552 push_cfun (func);
2553 calculate_dominance_info (CDI_DOMINATORS);
2554 ipa_initialize_node_params (node);
2555 ipa_analyze_controlled_uses (node);
2557 fbi.node = node;
2558 fbi.info = IPA_NODE_REF (node);
2559 fbi.bb_infos = vNULL;
2560 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2561 fbi.param_count = ipa_get_param_count (info);
2562 fbi.aa_walked = 0;
2564 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2566 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2567 bi->cg_edges.safe_push (cs);
2570 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2572 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2573 bi->cg_edges.safe_push (cs);
2576 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2578 int i;
2579 struct ipa_bb_info *bi;
2580 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi.bb_infos.release ();
2583 free_dominance_info (CDI_DOMINATORS);
2584 pop_cfun ();
2587 /* Update the jump function DST when the call graph edge corresponding to SRC
2588 is being inlined, knowing that DST is of type ancestor and SRC of known
2589 type. */
2591 static void
2592 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2593 struct ipa_jump_func *dst)
2595 HOST_WIDE_INT combined_offset;
2596 tree combined_type;
2598 if (!ipa_get_jf_ancestor_type_preserved (dst))
2600 dst->type = IPA_JF_UNKNOWN;
2601 return;
2604 combined_offset = ipa_get_jf_known_type_offset (src)
2605 + ipa_get_jf_ancestor_offset (dst);
2606 combined_type = ipa_get_jf_ancestor_type (dst);
2608 ipa_set_jf_known_type (dst, combined_offset,
2609 ipa_get_jf_known_type_base_type (src),
2610 combined_type);
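/* A worked example: if SRC describes a known object with base type D at
   offset 16 and DST is an ancestor jump function with offset 64 and type
   A, the result is a known-type jump function with offset 16 + 64 == 80,
   base type D and component type A; when the ancestor does not preserve
   the type, DST degrades to IPA_JF_UNKNOWN instead, as above. */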
2613 /* Update the jump functions associated with call graph edge E when the call
2614 graph edge CS is being inlined, assuming that E->caller is already (possibly
2615 indirectly) inlined into CS->callee and that E has not been inlined. */
2617 static void
2618 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2619 struct cgraph_edge *e)
2621 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2622 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2623 int count = ipa_get_cs_argument_count (args);
2624 int i;
2626 for (i = 0; i < count; i++)
2628 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2629 struct ipa_polymorphic_call_context *dst_ctx
2630 = ipa_get_ith_polymorhic_call_context (args, i);
2632 if (dst->type == IPA_JF_ANCESTOR)
2634 struct ipa_jump_func *src;
2635 int dst_fid = dst->value.ancestor.formal_id;
2636 struct ipa_polymorphic_call_context *src_ctx
2637 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2639 /* Variable number of arguments can cause havoc if we try to access
2640 one that does not exist in the inlined edge. So make sure we
2641 don't. */
2642 if (dst_fid >= ipa_get_cs_argument_count (top))
2644 dst->type = IPA_JF_UNKNOWN;
2645 continue;
2648 src = ipa_get_ith_jump_func (top, dst_fid);
2650 if (src_ctx && !src_ctx->useless_p ())
2652 struct ipa_polymorphic_call_context ctx = *src_ctx;
2654 /* TODO: Make type preserved safe WRT contexts. */
2655 if (!dst->value.ancestor.agg_preserved)
2656 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2657 ctx.offset_by (dst->value.ancestor.offset);
2658 if (!ctx.useless_p ())
2660 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2661 count);
2662 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2666 if (src->agg.items
2667 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2669 struct ipa_agg_jf_item *item;
2670 int j;
2672 /* Currently we do not produce clobber aggregate jump functions,
2673 replace with merging when we do. */
2674 gcc_assert (!dst->agg.items);
2676 dst->agg.items = vec_safe_copy (src->agg.items);
2677 dst->agg.by_ref = src->agg.by_ref;
2678 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2679 item->offset -= dst->value.ancestor.offset;
2682 if (src->type == IPA_JF_KNOWN_TYPE)
2683 combine_known_type_and_ancestor_jfs (src, dst);
2684 else if (src->type == IPA_JF_PASS_THROUGH
2685 && src->value.pass_through.operation == NOP_EXPR)
2687 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2688 dst->value.ancestor.agg_preserved &=
2689 src->value.pass_through.agg_preserved;
2690 dst->value.ancestor.type_preserved &=
2691 src->value.pass_through.type_preserved;
2693 else if (src->type == IPA_JF_ANCESTOR)
2695 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2696 dst->value.ancestor.offset += src->value.ancestor.offset;
2697 dst->value.ancestor.agg_preserved &=
2698 src->value.ancestor.agg_preserved;
2699 dst->value.ancestor.type_preserved &=
2700 src->value.ancestor.type_preserved;
2702 else
2703 dst->type = IPA_JF_UNKNOWN;
2705 else if (dst->type == IPA_JF_PASS_THROUGH)
2707 struct ipa_jump_func *src;
2708 /* We must check the range due to calls with a variable number of arguments,
2709 and we cannot combine jump functions with operations. */
2710 if (dst->value.pass_through.operation == NOP_EXPR
2711 && (dst->value.pass_through.formal_id
2712 < ipa_get_cs_argument_count (top)))
2714 int dst_fid = dst->value.pass_through.formal_id;
2715 src = ipa_get_ith_jump_func (top, dst_fid);
2716 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2717 struct ipa_polymorphic_call_context *src_ctx
2718 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2720 if (src_ctx && !src_ctx->useless_p ())
2722 struct ipa_polymorphic_call_context ctx = *src_ctx;
2724 /* TODO: Make type preserved safe WRT contexts. */
2725 if (!dst->value.ancestor.agg_preserved)
2726 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2727 if (!ctx.useless_p ())
2729 if (!dst_ctx)
2731 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2732 count);
2733 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2735 dst_ctx->combine_with (ctx);
2738 switch (src->type)
2740 case IPA_JF_UNKNOWN:
2741 dst->type = IPA_JF_UNKNOWN;
2742 break;
2743 case IPA_JF_KNOWN_TYPE:
2744 if (ipa_get_jf_pass_through_type_preserved (dst))
2745 ipa_set_jf_known_type (dst,
2746 ipa_get_jf_known_type_offset (src),
2747 ipa_get_jf_known_type_base_type (src),
2748 ipa_get_jf_known_type_component_type (src));
2749 else
2750 dst->type = IPA_JF_UNKNOWN;
2751 break;
2752 case IPA_JF_CONST:
2753 ipa_set_jf_cst_copy (dst, src);
2754 break;
2756 case IPA_JF_PASS_THROUGH:
2758 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2759 enum tree_code operation;
2760 operation = ipa_get_jf_pass_through_operation (src);
2762 if (operation == NOP_EXPR)
2764 bool agg_p, type_p;
2765 agg_p = dst_agg_p
2766 && ipa_get_jf_pass_through_agg_preserved (src);
2767 type_p = ipa_get_jf_pass_through_type_preserved (src)
2768 && ipa_get_jf_pass_through_type_preserved (dst);
2769 ipa_set_jf_simple_pass_through (dst, formal_id,
2770 agg_p, type_p);
2772 else
2774 tree operand = ipa_get_jf_pass_through_operand (src);
2775 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2776 operation);
2778 break;
2780 case IPA_JF_ANCESTOR:
2782 bool agg_p, type_p;
2783 agg_p = dst_agg_p
2784 && ipa_get_jf_ancestor_agg_preserved (src);
2785 type_p = ipa_get_jf_ancestor_type_preserved (src)
2786 && ipa_get_jf_pass_through_type_preserved (dst);
2787 ipa_set_ancestor_jf (dst,
2788 ipa_get_jf_ancestor_offset (src),
2789 ipa_get_jf_ancestor_type (src),
2790 ipa_get_jf_ancestor_formal_id (src),
2791 agg_p, type_p);
2792 break;
2794 default:
2795 gcc_unreachable ();
2798 if (src->agg.items
2799 && (dst_agg_p || !src->agg.by_ref))
2801 /* Currently we do not produce clobber aggregate jump
2802 functions, replace with merging when we do. */
2803 gcc_assert (!dst->agg.items);
2805 dst->agg.by_ref = src->agg.by_ref;
2806 dst->agg.items = vec_safe_copy (src->agg.items);
2809 else
2810 dst->type = IPA_JF_UNKNOWN;
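/* A worked example for the pass-through case above: if E's argument I was
   a simple pass-through of the callee's parameter J, and the inlined edge
   CS passed the constant 7 at position J, DST becomes a copy of that
   constant jump function; had CS passed a simple pass-through of its own
   parameter K instead, DST would become a pass-through of K with the
   agg/type preserved flags of SRC and DST ANDed together. */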
2815 /* If TARGET is an addr_expr of a function declaration, make it the
2816 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2817 Otherwise, return NULL. */
2819 struct cgraph_edge *
2820 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2821 bool speculative)
2823 struct cgraph_node *callee;
2824 struct inline_edge_summary *es = inline_edge_summary (ie);
2825 bool unreachable = false;
2827 if (TREE_CODE (target) == ADDR_EXPR)
2828 target = TREE_OPERAND (target, 0);
2829 if (TREE_CODE (target) != FUNCTION_DECL)
2831 target = canonicalize_constructor_val (target, NULL);
2832 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2834 if (ie->indirect_info->member_ptr)
2835 /* Member pointer call that goes through a VMT lookup. */
2836 return NULL;
2838 if (dump_enabled_p ())
2840 location_t loc = gimple_location_safe (ie->call_stmt);
2841 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2842 "discovered direct call to non-function in %s/%i, "
2843 "making it __builtin_unreachable\n",
2844 ie->caller->name (), ie->caller->order);
2847 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2848 callee = cgraph_node::get_create (target);
2849 unreachable = true;
2851 else
2852 callee = cgraph_node::get (target);
2854 else
2855 callee = cgraph_node::get (target);
2857 /* Because may-edges are not explicitly represented and the vtable may be external,
2858 we may create the first reference to the object in the unit. */
2859 if (!callee || callee->global.inlined_to)
2862 /* We had better ensure we can refer to it.
2863 In the case of static functions we are out of luck, since we already
2864 removed their bodies. In the case of public functions we may or may
2865 not introduce the reference. */
2866 if (!canonicalize_constructor_val (target, NULL)
2867 || !TREE_PUBLIC (target))
2869 if (dump_file)
2870 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2871 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2872 xstrdup (ie->caller->name ()),
2873 ie->caller->order,
2874 xstrdup (ie->callee->name ()),
2875 ie->callee->order);
2876 return NULL;
2878 callee = cgraph_node::get_create (target);
2881 /* If the edge is already speculated. */
2882 if (speculative && ie->speculative)
2884 struct cgraph_edge *e2;
2885 struct ipa_ref *ref;
2886 ie->speculative_call_info (e2, ie, ref);
2887 if (e2->callee->ultimate_alias_target ()
2888 != callee->ultimate_alias_target ())
2890 if (dump_file)
2891 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2892 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2893 xstrdup (ie->caller->name ()),
2894 ie->caller->order,
2895 xstrdup (callee->name ()),
2896 callee->order,
2897 xstrdup (e2->callee->name ()),
2898 e2->callee->order);
2900 else
2902 if (dump_file)
2903 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2904 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2905 xstrdup (ie->caller->name ()),
2906 ie->caller->order,
2907 xstrdup (callee->name ()),
2908 callee->order);
2910 return NULL;
2913 if (!dbg_cnt (devirt))
2914 return NULL;
2916 ipa_check_create_node_params ();
2918 /* We cannot make edges to inline clones. It is a bug if someone removed
2919 the cgraph node too early. */
2920 gcc_assert (!callee->global.inlined_to);
2922 if (dump_file && !unreachable)
2924 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2925 "(%s/%i -> %s/%i), for stmt ",
2926 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2927 speculative ? "speculative" : "known",
2928 xstrdup (ie->caller->name ()),
2929 ie->caller->order,
2930 xstrdup (callee->name ()),
2931 callee->order);
2932 if (ie->call_stmt)
2933 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2934 else
2935 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2937 if (dump_enabled_p ())
2939 location_t loc = gimple_location_safe (ie->call_stmt);
2941 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2942 "converting indirect call in %s to direct call to %s\n",
2943 ie->caller->name (), callee->name ());
2945 if (!speculative)
2946 ie = ie->make_direct (callee);
2947 else
2949 if (!callee->can_be_discarded_p ())
2951 cgraph_node *alias;
2952 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2953 if (alias)
2954 callee = alias;
2956 ie = ie->make_speculative
2957 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2959 es = inline_edge_summary (ie);
2960 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2961 - eni_size_weights.call_cost);
2962 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2963 - eni_time_weights.call_cost);
2965 return ie;
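/* A note on the scaling above, as a worked example: a speculative edge is
   created with 8/10 of the indirect edge's profile count and frequency,
   so for ie->count == 1000 the speculative direct edge starts with 800.
   The inline summary is then made cheaper by the difference between the
   indirect-call weights and the direct-call weights, reflecting that the
   resolved call no longer pays the full indirect-call penalty. */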
2968 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2969 return NULL if there is none. BY_REF specifies whether the value has to
2970 be passed by reference or by value. */
2972 tree
2973 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2974 HOST_WIDE_INT offset, bool by_ref)
2976 struct ipa_agg_jf_item *item;
2977 int i;
2979 if (by_ref != agg->by_ref)
2980 return NULL;
2982 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2983 if (item->offset == offset)
2985 /* Currently we do not have clobber values, return NULL for them once
2986 we do. */
2987 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2988 return item->value;
2990 return NULL;
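/* An illustrative usage (a sketch with assumed values): if JFUNC->agg
   describes by-reference items { offset 0 -> 1, offset 32 -> 2 }, then

     tree t = ipa_find_agg_cst_for_param (&jfunc->agg, 32, true);

   returns the INTEGER_CST 2, while any other offset, or a by_ref
   mismatch, yields NULL. */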
2993 /* Remove a reference to SYMBOL from the list of references of a node given by
2994 reference description RDESC. Return true if the reference has been
2995 successfully found and removed. */
2997 static bool
2998 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3000 struct ipa_ref *to_del;
3001 struct cgraph_edge *origin;
3003 origin = rdesc->cs;
3004 if (!origin)
3005 return false;
3006 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3007 origin->lto_stmt_uid);
3008 if (!to_del)
3009 return false;
3011 to_del->remove_reference ();
3012 if (dump_file)
3013 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3014 xstrdup (origin->caller->name ()),
3015 origin->caller->order, xstrdup (symbol->name ()));
3016 return true;
3019 /* If JFUNC has a reference description with refcount different from
3020 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3021 NULL. JFUNC must be a constant jump function. */
3023 static struct ipa_cst_ref_desc *
3024 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3026 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3027 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3028 return rdesc;
3029 else
3030 return NULL;
3033 /* If the value of constant jump function JFUNC is an address of a function
3034 declaration, return the associated call graph node. Otherwise return
3035 NULL. */
3037 static cgraph_node *
3038 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3040 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3041 tree cst = ipa_get_jf_constant (jfunc);
3042 if (TREE_CODE (cst) != ADDR_EXPR
3043 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3044 return NULL;
3046 return cgraph_node::get (TREE_OPERAND (cst, 0));
3050 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3051 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3052 the edge specified in the rdesc. Return false if either the symbol or the
3053 reference could not be found, otherwise return true. */
3055 static bool
3056 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3058 struct ipa_cst_ref_desc *rdesc;
3059 if (jfunc->type == IPA_JF_CONST
3060 && (rdesc = jfunc_rdesc_usable (jfunc))
3061 && --rdesc->refcount == 0)
3063 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3064 if (!symbol)
3065 return false;
3067 return remove_described_reference (symbol, rdesc);
3069 return true;
3072 /* Try to find a destination for indirect edge IE that corresponds to a simple
3073 call or a call of a member function pointer and where the destination is a
3074 pointer formal parameter described by jump function JFUNC. If it can be
3075 determined, return the newly direct edge, otherwise return NULL.
3076 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3078 static struct cgraph_edge *
3079 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3080 struct ipa_jump_func *jfunc,
3081 struct ipa_node_params *new_root_info)
3083 struct cgraph_edge *cs;
3084 tree target;
3085 bool agg_contents = ie->indirect_info->agg_contents;
3087 if (ie->indirect_info->agg_contents)
3088 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3089 ie->indirect_info->offset,
3090 ie->indirect_info->by_ref);
3091 else
3092 target = ipa_value_from_jfunc (new_root_info, jfunc);
3093 if (!target)
3094 return NULL;
3095 cs = ipa_make_edge_direct_to_target (ie, target);
3097 if (cs && !agg_contents)
3099 bool ok;
3100 gcc_checking_assert (cs->callee
3101 && (cs != ie
3102 || jfunc->type != IPA_JF_CONST
3103 || !cgraph_node_for_jfunc (jfunc)
3104 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3105 ok = try_decrement_rdesc_refcount (jfunc);
3106 gcc_checking_assert (ok);
3109 return cs;
3112 /* Return the target to be used in cases of impossible devirtualization. IE
3113 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
3115 tree
3116 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3118 if (dump_file)
3120 if (target)
3121 fprintf (dump_file,
3122 "Type inconsistent devirtualization: %s/%i->%s\n",
3123 ie->caller->name (), ie->caller->order,
3124 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3125 else
3126 fprintf (dump_file,
3127 "No devirtualization target in %s/%i\n",
3128 ie->caller->name (), ie->caller->order);
3130 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3131 cgraph_node::get_create (new_target);
3132 return new_target;
3135 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3136 call based on a formal parameter which is described by jump function JFUNC
3137 and if it can be determined, make it direct and return the direct edge.
3138 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3139 are relative to. */
3141 static struct cgraph_edge *
3142 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3143 struct ipa_jump_func *jfunc,
3144 struct ipa_node_params *new_root_info,
3145 struct ipa_polymorphic_call_context *ctx_ptr)
3147 tree binfo, target = NULL;
3148 bool speculative = false;
3149 bool updated = false;
3151 if (!flag_devirtualize)
3152 return NULL;
3154 /* If this is a call of a function parameter, restrict its type
3155 based on knowledge of the context. */
3156 if (ctx_ptr && !ie->indirect_info->by_ref)
3158 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3160 ctx.offset_by (ie->indirect_info->offset);
3162 if (ie->indirect_info->vptr_changed)
3163 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3164 ie->indirect_info->otr_type);
3166 updated = ie->indirect_info->context.combine_with
3167 (ctx, ie->indirect_info->otr_type);
3170 /* Try to do lookup via known virtual table pointer value. */
3171 if (!ie->indirect_info->by_ref
3172 && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
3174 tree vtable;
3175 unsigned HOST_WIDE_INT offset;
3176 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3177 ie->indirect_info->offset,
3178 true);
3179 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3181 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3182 vtable, offset);
3183 if (t)
3185 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3186 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3187 || !possible_polymorphic_call_target_p
3188 (ie, cgraph_node::get (t)))
3190 /* Do not speculate builtin_unreachable, it is stupid! */
3191 if (!ie->indirect_info->vptr_changed)
3192 target = ipa_impossible_devirt_target (ie, target);
3194 else
3196 target = t;
3197 speculative = ie->indirect_info->vptr_changed;
3203 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3205 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3207 struct ipa_polymorphic_call_context ctx (binfo,
3208 ie->indirect_info->otr_type,
3209 ie->indirect_info->offset);
3210 updated |= ie->indirect_info->context.combine_with
3211 (ctx, ie->indirect_info->otr_type);
3214 if (updated)
3216 ipa_polymorphic_call_context context (ie);
3217 vec <cgraph_node *>targets;
3218 bool final;
3220 targets = possible_polymorphic_call_targets
3221 (ie->indirect_info->otr_type,
3222 ie->indirect_info->otr_token,
3223 context, &final);
3224 if (final && targets.length () <= 1)
3226 if (targets.length () == 1)
3227 target = targets[0]->decl;
3228 else
3229 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3231 else if (!target && flag_devirtualize_speculatively
3232 && !ie->speculative && ie->maybe_hot_p ())
3234 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3235 ie->indirect_info->otr_token,
3236 ie->indirect_info->context);
3237 if (n)
3239 target = n->decl;
3240 speculative = true;
3245 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3247 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3248 ie->indirect_info->otr_type);
3249 if (binfo)
3251 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3252 binfo);
3253 if (t)
3255 gcc_assert (!target || speculative || target == t);
3256 target = t;
3257 speculative = false;
3262 if (target)
3264 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3266 if (!speculative)
3267 return NULL;
3268 target = ipa_impossible_devirt_target (ie, target);
3270 return ipa_make_edge_direct_to_target (ie, target, speculative);
3272 else
3273 return NULL;
3276 /* Update the param called notes associated with NODE when CS is being inlined,
3277 assuming NODE is (potentially indirectly) inlined into CS->callee.
3278 Moreover, if the callee is discovered to be constant, create a new cgraph
3279 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3280 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3282 static bool
3283 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3284 struct cgraph_node *node,
3285 vec<cgraph_edge *> *new_edges)
3287 struct ipa_edge_args *top;
3288 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3289 struct ipa_node_params *new_root_info;
3290 bool res = false;
3292 ipa_check_create_edge_args ();
3293 top = IPA_EDGE_REF (cs);
3294 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3295 ? cs->caller->global.inlined_to
3296 : cs->caller);
3298 for (ie = node->indirect_calls; ie; ie = next_ie)
3300 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3301 struct ipa_jump_func *jfunc;
3302 int param_index;
3304 next_ie = ie->next_callee;
3306 if (ici->param_index == -1)
3307 continue;
3309 /* We must check the range due to calls with a variable number of arguments. */
3310 if (ici->param_index >= ipa_get_cs_argument_count (top))
3312 ici->param_index = -1;
3313 continue;
3316 param_index = ici->param_index;
3317 jfunc = ipa_get_ith_jump_func (top, param_index);
3319 if (!flag_indirect_inlining)
3320 new_direct_edge = NULL;
3321 else if (ici->polymorphic)
3323 ipa_polymorphic_call_context *ctx;
3324 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3325 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3326 new_root_info,
3327 ctx);
3329 else
3330 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3331 new_root_info);
3332 /* If speculation was removed, then we need to do nothing. */
3333 if (new_direct_edge && new_direct_edge != ie)
3335 new_direct_edge->indirect_inlining_edge = 1;
3336 top = IPA_EDGE_REF (cs);
3337 res = true;
3339 else if (new_direct_edge)
3341 new_direct_edge->indirect_inlining_edge = 1;
3342 if (new_direct_edge->call_stmt)
3343 new_direct_edge->call_stmt_cannot_inline_p
3344 = !gimple_check_call_matching_types (
3345 new_direct_edge->call_stmt,
3346 new_direct_edge->callee->decl, false);
3347 if (new_edges)
3349 new_edges->safe_push (new_direct_edge);
3350 res = true;
3352 top = IPA_EDGE_REF (cs);
3354 else if (jfunc->type == IPA_JF_PASS_THROUGH
3355 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3357 if ((ici->agg_contents
3358 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3359 || (ici->polymorphic
3360 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3361 ici->param_index = -1;
3362 else
3363 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3365 else if (jfunc->type == IPA_JF_ANCESTOR)
3367 if ((ici->agg_contents
3368 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3369 || (ici->polymorphic
3370 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3371 ici->param_index = -1;
3372 else
3374 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3375 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3378 else
3379 /* Either we can find a destination for this edge now or never. */
3380 ici->param_index = -1;
3383 return res;
3386 /* Recursively traverse subtree of NODE (including node) made of inlined
3387 cgraph_edges when CS has been inlined and invoke
3388 update_indirect_edges_after_inlining on all nodes and
3389 update_jump_functions_after_inlining on all non-inlined edges that lead out
3390 of this subtree. Newly discovered indirect edges will be added to
3391 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3392 created. */
3394 static bool
3395 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3396 struct cgraph_node *node,
3397 vec<cgraph_edge *> *new_edges)
3399 struct cgraph_edge *e;
3400 bool res;
3402 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3404 for (e = node->callees; e; e = e->next_callee)
3405 if (!e->inline_failed)
3406 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3407 else
3408 update_jump_functions_after_inlining (cs, e);
3409 for (e = node->indirect_calls; e; e = e->next_callee)
3410 update_jump_functions_after_inlining (cs, e);
3412 return res;
3415 /* Combine two controlled uses counts as done during inlining. */
3417 static int
3418 combine_controlled_uses_counters (int c, int d)
3420 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3421 return IPA_UNDESCRIBED_USE;
3422 else
3423 return c + d - 1;
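/* A worked example: if the new root has C == 2 described uses of a value
   and the inlined callee's parameter has D == 3 controlled uses, the use
   that was the argument of the now-inlined call is replaced by the
   callee's three uses, giving 2 + 3 - 1 == 4; an IPA_UNDESCRIBED_USE on
   either side keeps the result undescribed. */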
3426 /* Propagate the number of controlled users from CS->callee to the new root of the
3427 tree of inlined nodes. */
3429 static void
3430 propagate_controlled_uses (struct cgraph_edge *cs)
3432 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3433 struct cgraph_node *new_root = cs->caller->global.inlined_to
3434 ? cs->caller->global.inlined_to : cs->caller;
3435 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3436 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3437 int count, i;
3439 count = MIN (ipa_get_cs_argument_count (args),
3440 ipa_get_param_count (old_root_info));
3441 for (i = 0; i < count; i++)
3443 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3444 struct ipa_cst_ref_desc *rdesc;
3446 if (jf->type == IPA_JF_PASS_THROUGH)
3448 int src_idx, c, d;
3449 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3450 c = ipa_get_controlled_uses (new_root_info, src_idx);
3451 d = ipa_get_controlled_uses (old_root_info, i);
3453 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3454 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3455 c = combine_controlled_uses_counters (c, d);
3456 ipa_set_controlled_uses (new_root_info, src_idx, c);
3457 if (c == 0 && new_root_info->ipcp_orig_node)
3459 struct cgraph_node *n;
3460 struct ipa_ref *ref;
3461 tree t = new_root_info->known_vals[src_idx];
3463 if (t && TREE_CODE (t) == ADDR_EXPR
3464 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3465 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3466 && (ref = new_root->find_reference (n, NULL, 0)))
3468 if (dump_file)
3469 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3470 "reference from %s/%i to %s/%i.\n",
3471 xstrdup (new_root->name ()),
3472 new_root->order,
3473 xstrdup (n->name ()), n->order);
3474 ref->remove_reference ();
3478 else if (jf->type == IPA_JF_CONST
3479 && (rdesc = jfunc_rdesc_usable (jf)))
3481 int d = ipa_get_controlled_uses (old_root_info, i);
3482 int c = rdesc->refcount;
3483 rdesc->refcount = combine_controlled_uses_counters (c, d);
3484 if (rdesc->refcount == 0)
3486 tree cst = ipa_get_jf_constant (jf);
3487 struct cgraph_node *n;
3488 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3489 && TREE_CODE (TREE_OPERAND (cst, 0))
3490 == FUNCTION_DECL);
3491 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3492 if (n)
3494 struct cgraph_node *clone;
3495 bool ok;
3496 ok = remove_described_reference (n, rdesc);
3497 gcc_checking_assert (ok);
3499 clone = cs->caller;
3500 while (clone->global.inlined_to
3501 && clone != rdesc->cs->caller
3502 && IPA_NODE_REF (clone)->ipcp_orig_node)
3504 struct ipa_ref *ref;
3505 ref = clone->find_reference (n, NULL, 0);
3506 if (ref)
3508 if (dump_file)
3509 fprintf (dump_file, "ipa-prop: Removing "
3510 "cloning-created reference "
3511 "from %s/%i to %s/%i.\n",
3512 xstrdup (clone->name ()),
3513 clone->order,
3514 xstrdup (n->name ()),
3515 n->order);
3516 ref->remove_reference ();
3518 clone = clone->callers->caller;
3525 for (i = ipa_get_param_count (old_root_info);
3526 i < ipa_get_cs_argument_count (args);
3527 i++)
3529 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3531 if (jf->type == IPA_JF_CONST)
3533 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3534 if (rdesc)
3535 rdesc->refcount = IPA_UNDESCRIBED_USE;
3537 else if (jf->type == IPA_JF_PASS_THROUGH)
3538 ipa_set_controlled_uses (new_root_info,
3539 jf->value.pass_through.formal_id,
3540 IPA_UNDESCRIBED_USE);
3544 /* Update jump functions and call note functions on inlining the call site CS.
3545 CS is expected to lead to a node already cloned by
3546 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3547 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3548 created. */
3550 bool
3551 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3552 vec<cgraph_edge *> *new_edges)
3554 bool changed;
3555 /* Do nothing if the preparation phase has not been carried out yet
3556 (i.e. during early inlining). */
3557 if (!ipa_node_params_vector.exists ())
3558 return false;
3559 gcc_assert (ipa_edge_args_vector);
3561 propagate_controlled_uses (cs);
3562 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3564 return changed;
3567 /* Frees all dynamically allocated structures that the argument info points
3568 to. */
3570 void
3571 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3573 vec_free (args->jump_functions);
3574 memset (args, 0, sizeof (*args));
3577 /* Free all ipa_edge_args structures. */
3579 void
3580 ipa_free_all_edge_args (void)
3582 int i;
3583 struct ipa_edge_args *args;
3585 if (!ipa_edge_args_vector)
3586 return;
3588 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3589 ipa_free_edge_args_substructures (args);
3591 vec_free (ipa_edge_args_vector);
3594 /* Frees all dynamically allocated structures that the param info points
3595 to. */
3597 void
3598 ipa_free_node_params_substructures (struct ipa_node_params *info)
3600 info->descriptors.release ();
3601 free (info->lattices);
3602 /* Lattice values and their sources are deallocated with their allocation
3603 pool. */
3604 info->known_vals.release ();
3605 memset (info, 0, sizeof (*info));
3608 /* Free all ipa_node_params structures. */
3610 void
3611 ipa_free_all_node_params (void)
3613 int i;
3614 struct ipa_node_params *info;
3616 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3617 ipa_free_node_params_substructures (info);
3619 ipa_node_params_vector.release ();
3622 /* Set the aggregate replacements of NODE to be AGGVALS. */
3624 void
3625 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3626 struct ipa_agg_replacement_value *aggvals)
3628 if (vec_safe_length (ipa_node_agg_replacements)
3629 <= (unsigned) symtab->cgraph_max_uid)
3630 vec_safe_grow_cleared (ipa_node_agg_replacements,
3631 symtab->cgraph_max_uid + 1);
3633 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3636 /* Hook that is called by cgraph.c when an edge is removed. */
3638 static void
3639 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3641 struct ipa_edge_args *args;
3643 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3644 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3645 return;
3647 args = IPA_EDGE_REF (cs);
3648 if (args->jump_functions)
3650 struct ipa_jump_func *jf;
3651 int i;
3652 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3654 struct ipa_cst_ref_desc *rdesc;
3655 try_decrement_rdesc_refcount (jf);
3656 if (jf->type == IPA_JF_CONST
3657 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3658 && rdesc->cs == cs)
3659 rdesc->cs = NULL;
3663 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3666 /* Hook that is called by cgraph.c when a node is removed. */
3668 static void
3669 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3671 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3672 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3673 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3674 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3675 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3678 /* Hook that is called by cgraph.c when an edge is duplicated. */
3680 static void
3681 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3682 __attribute__((unused)) void *data)
3684 struct ipa_edge_args *old_args, *new_args;
3685 unsigned int i;
3687 ipa_check_create_edge_args ();
3689 old_args = IPA_EDGE_REF (src);
3690 new_args = IPA_EDGE_REF (dst);
3692 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3693 if (old_args->polymorphic_call_contexts)
3694 new_args->polymorphic_call_contexts
3695 = vec_safe_copy (old_args->polymorphic_call_contexts);
3697 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3699 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3700 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3702 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3704 if (src_jf->type == IPA_JF_CONST)
3706 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3708 if (!src_rdesc)
3709 dst_jf->value.constant.rdesc = NULL;
3710 else if (src->caller == dst->caller)
3712 struct ipa_ref *ref;
3713 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3714 gcc_checking_assert (n);
3715 ref = src->caller->find_reference (n, src->call_stmt,
3716 src->lto_stmt_uid);
3717 gcc_checking_assert (ref);
3718 dst->caller->clone_reference (ref, ref->stmt);
3720 gcc_checking_assert (ipa_refdesc_pool);
3721 struct ipa_cst_ref_desc *dst_rdesc
3722 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3723 dst_rdesc->cs = dst;
3724 dst_rdesc->refcount = src_rdesc->refcount;
3725 dst_rdesc->next_duplicate = NULL;
3726 dst_jf->value.constant.rdesc = dst_rdesc;
3728 else if (src_rdesc->cs == src)
3730 struct ipa_cst_ref_desc *dst_rdesc;
3731 gcc_checking_assert (ipa_refdesc_pool);
3732 dst_rdesc
3733 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3734 dst_rdesc->cs = dst;
3735 dst_rdesc->refcount = src_rdesc->refcount;
3736 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3737 src_rdesc->next_duplicate = dst_rdesc;
3738 dst_jf->value.constant.rdesc = dst_rdesc;
3740 else
3742 struct ipa_cst_ref_desc *dst_rdesc;
3743 /* This can happen during inlining, when a JFUNC can refer to a
3744 reference taken in a function up in the tree of inline clones.
3745 We need to find the duplicate that refers to our tree of
3746 inline clones. */
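/* Illustration with hypothetical nodes: if C was inlined into B and B in
   turn into A, a constant jump function computed in C may carry an rdesc
   created for a different tree of inline clones.  Every such tree gets its
   own duplicate on the next_duplicate chain, so the loop below walks that
   chain until it finds the duplicate whose caller's inlined_to root matches
   that of DST.  */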
3748 gcc_assert (dst->caller->global.inlined_to);
3749 for (dst_rdesc = src_rdesc->next_duplicate;
3750 dst_rdesc;
3751 dst_rdesc = dst_rdesc->next_duplicate)
3753 struct cgraph_node *top;
3754 top = dst_rdesc->cs->caller->global.inlined_to
3755 ? dst_rdesc->cs->caller->global.inlined_to
3756 : dst_rdesc->cs->caller;
3757 if (dst->caller->global.inlined_to == top)
3758 break;
3760 gcc_assert (dst_rdesc);
3761 dst_jf->value.constant.rdesc = dst_rdesc;
3764 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3765 && src->caller == dst->caller)
3767 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3768 ? dst->caller->global.inlined_to : dst->caller;
3769 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3770 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3772 int c = ipa_get_controlled_uses (root_info, idx);
3773 if (c != IPA_UNDESCRIBED_USE)
3775 c++;
3776 ipa_set_controlled_uses (root_info, idx, c);
3782 /* Hook that is called by cgraph.c when a node is duplicated. */
3784 static void
3785 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3786 ATTRIBUTE_UNUSED void *data)
3788 struct ipa_node_params *old_info, *new_info;
3789 struct ipa_agg_replacement_value *old_av, *new_av;
3791 ipa_check_create_node_params ();
3792 old_info = IPA_NODE_REF (src);
3793 new_info = IPA_NODE_REF (dst);
3795 new_info->descriptors = old_info->descriptors.copy ();
3796 new_info->lattices = NULL;
3797 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3799 new_info->analysis_done = old_info->analysis_done;
3800 new_info->node_enqueued = old_info->node_enqueued;
3802 old_av = ipa_get_agg_replacements_for_node (src);
3803 if (!old_av)
3804 return;
3806 new_av = NULL;
3807 while (old_av)
3809 struct ipa_agg_replacement_value *v;
3811 v = ggc_alloc<ipa_agg_replacement_value> ();
3812 memcpy (v, old_av, sizeof (*v));
3813 v->next = new_av;
3814 new_av = v;
3815 old_av = old_av->next;
3817 ipa_set_node_agg_value_chain (dst, new_av);
3821 /* Analyze a function newly added to the callgraph. */
3823 static void
3824 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3826 if (node->has_gimple_body_p ())
3827 ipa_analyze_node (node);
3830 /* Register our cgraph hooks if they are not already there. */
3832 void
3833 ipa_register_cgraph_hooks (void)
3835 if (!edge_removal_hook_holder)
3836 edge_removal_hook_holder =
3837 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3838 if (!node_removal_hook_holder)
3839 node_removal_hook_holder =
3840 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3841 if (!edge_duplication_hook_holder)
3842 edge_duplication_hook_holder =
3843 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3844 if (!node_duplication_hook_holder)
3845 node_duplication_hook_holder =
3846 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3847 function_insertion_hook_holder =
3848 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3851 /* Unregister our cgraph hooks. */
3853 static void
3854 ipa_unregister_cgraph_hooks (void)
3856 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3857 edge_removal_hook_holder = NULL;
3858 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3859 node_removal_hook_holder = NULL;
3860 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3861 edge_duplication_hook_holder = NULL;
3862 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3863 node_duplication_hook_holder = NULL;
3864 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3865 function_insertion_hook_holder = NULL;
3868 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3869 longer needed after ipa-cp. */
3871 void
3872 ipa_free_all_structures_after_ipa_cp (void)
3874 if (!optimize)
3876 ipa_free_all_edge_args ();
3877 ipa_free_all_node_params ();
3878 free_alloc_pool (ipcp_sources_pool);
3879 free_alloc_pool (ipcp_values_pool);
3880 free_alloc_pool (ipcp_agg_lattice_pool);
3881 ipa_unregister_cgraph_hooks ();
3882 if (ipa_refdesc_pool)
3883 free_alloc_pool (ipa_refdesc_pool);
3887 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3888 longer needed after indirect inlining. */
3890 void
3891 ipa_free_all_structures_after_iinln (void)
3893 ipa_free_all_edge_args ();
3894 ipa_free_all_node_params ();
3895 ipa_unregister_cgraph_hooks ();
3896 if (ipcp_sources_pool)
3897 free_alloc_pool (ipcp_sources_pool);
3898 if (ipcp_values_pool)
3899 free_alloc_pool (ipcp_values_pool);
3900 if (ipcp_agg_lattice_pool)
3901 free_alloc_pool (ipcp_agg_lattice_pool);
3902 if (ipa_refdesc_pool)
3903 free_alloc_pool (ipa_refdesc_pool);
3906 /* Print ipa_tree_map data structures of function NODE to F. */
3909 void
3910 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3912 int i, count;
3913 struct ipa_node_params *info;
3915 if (!node->definition)
3916 return;
3917 info = IPA_NODE_REF (node);
3918 fprintf (f, " function %s/%i parameter descriptors:\n",
3919 node->name (), node->order);
3920 count = ipa_get_param_count (info);
3921 for (i = 0; i < count; i++)
3923 int c;
3925 fprintf (f, " ");
3926 ipa_dump_param (f, info, i);
3927 if (ipa_is_param_used (info, i))
3928 fprintf (f, " used");
3929 c = ipa_get_controlled_uses (info, i);
3930 if (c == IPA_UNDESCRIBED_USE)
3931 fprintf (f, " undescribed_use");
3932 else
3933 fprintf (f, " controlled_uses=%i", c);
3934 fprintf (f, "\n");
3938 /* Print ipa_tree_map data structures of all functions in the
3939 callgraph to F. */
3941 void
3942 ipa_print_all_params (FILE * f)
3944 struct cgraph_node *node;
3946 fprintf (f, "\nFunction parameters:\n");
3947 FOR_EACH_FUNCTION (node)
3948 ipa_print_node_params (f, node);
3951 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3953 vec<tree>
3954 ipa_get_vector_of_formal_parms (tree fndecl)
3956 vec<tree> args;
3957 int count;
3958 tree parm;
3960 gcc_assert (!flag_wpa);
3961 count = count_formal_params (fndecl);
3962 args.create (count);
3963 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3964 args.quick_push (parm);
3966 return args;
3969 /* Return a heap allocated vector containing types of formal parameters of
3970 function type FNTYPE. */
3972 vec<tree>
3973 ipa_get_vector_of_formal_parm_types (tree fntype)
3975 vec<tree> types;
3976 int count = 0;
3977 tree t;
3979 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3980 count++;
3982 types.create (count);
3983 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3984 types.quick_push (TREE_VALUE (t));
3986 return types;
3989 /* Modify the function declaration FNDECL and its type according to the plan in
3990 ADJUSTMENTS. It also sets the base fields of individual adjustment
3991 structures to reflect the actual parameters being modified, which are
3992 determined by the base_index field. */
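/* Illustration only, with hypothetical adjustments: for
   void foo (int a, struct S *p), a vector whose first entry copies
   parameter 0 and whose second entry stands for the scalar int at offset 0
   of *p (an op that is neither COPY nor REMOVE, by_ref false) rewrites the
   declaration to something along the lines of void foo (int a, int ISRA.0),
   where ISRA.0 is a synthesized name.  */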
3994 void
3995 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3997 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3998 tree orig_type = TREE_TYPE (fndecl);
3999 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4001 /* The following test is an ugly hack; some functions simply don't have any
4002 arguments in their type. This is probably a bug, but well... */
4003 bool care_for_types = (old_arg_types != NULL_TREE);
4004 bool last_parm_void;
4005 vec<tree> otypes;
4006 if (care_for_types)
4008 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4009 == void_type_node);
4010 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4011 if (last_parm_void)
4012 gcc_assert (oparms.length () + 1 == otypes.length ());
4013 else
4014 gcc_assert (oparms.length () == otypes.length ());
4016 else
4018 last_parm_void = false;
4019 otypes.create (0);
4022 int len = adjustments.length ();
4023 tree *link = &DECL_ARGUMENTS (fndecl);
4024 tree new_arg_types = NULL;
4025 for (int i = 0; i < len; i++)
4027 struct ipa_parm_adjustment *adj;
4028 gcc_assert (link);
4030 adj = &adjustments[i];
4031 tree parm;
4032 if (adj->op == IPA_PARM_OP_NEW)
4033 parm = NULL;
4034 else
4035 parm = oparms[adj->base_index];
4036 adj->base = parm;
4038 if (adj->op == IPA_PARM_OP_COPY)
4040 if (care_for_types)
4041 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4042 new_arg_types);
4043 *link = parm;
4044 link = &DECL_CHAIN (parm);
4046 else if (adj->op != IPA_PARM_OP_REMOVE)
4048 tree new_parm;
4049 tree ptype;
4051 if (adj->by_ref)
4052 ptype = build_pointer_type (adj->type);
4053 else
4055 ptype = adj->type;
4056 if (is_gimple_reg_type (ptype))
4058 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4059 if (TYPE_ALIGN (ptype) < malign)
4060 ptype = build_aligned_type (ptype, malign);
4064 if (care_for_types)
4065 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4067 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4068 ptype);
4069 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4070 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4071 DECL_ARTIFICIAL (new_parm) = 1;
4072 DECL_ARG_TYPE (new_parm) = ptype;
4073 DECL_CONTEXT (new_parm) = fndecl;
4074 TREE_USED (new_parm) = 1;
4075 DECL_IGNORED_P (new_parm) = 1;
4076 layout_decl (new_parm, 0);
4078 if (adj->op == IPA_PARM_OP_NEW)
4079 adj->base = NULL;
4080 else
4081 adj->base = parm;
4082 adj->new_decl = new_parm;
4084 *link = new_parm;
4085 link = &DECL_CHAIN (new_parm);
4089 *link = NULL_TREE;
4091 tree new_reversed = NULL;
4092 if (care_for_types)
4094 new_reversed = nreverse (new_arg_types);
4095 if (last_parm_void)
4097 if (new_reversed)
4098 TREE_CHAIN (new_arg_types) = void_list_node;
4099 else
4100 new_reversed = void_list_node;
4104 /* Use copy_node to preserve as much as possible from the original type
4105 (debug info, attribute lists etc.).
4106 The exception is METHOD_TYPEs, which must have a THIS argument;
4107 when we are asked to remove it, we need to build a new FUNCTION_TYPE
4108 instead. */
4109 tree new_type = NULL;
4110 if (TREE_CODE (orig_type) != METHOD_TYPE
4111 || (adjustments[0].op == IPA_PARM_OP_COPY
4112 && adjustments[0].base_index == 0))
4114 new_type = build_distinct_type_copy (orig_type);
4115 TYPE_ARG_TYPES (new_type) = new_reversed;
4117 else
4119 new_type
4120 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4121 new_reversed));
4122 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4123 DECL_VINDEX (fndecl) = NULL_TREE;
4126 /* When the signature changes, we need to clear the builtin info. */
4127 if (DECL_BUILT_IN (fndecl))
4129 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4130 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4133 TREE_TYPE (fndecl) = new_type;
4134 DECL_VIRTUAL_P (fndecl) = 0;
4135 DECL_LANG_SPECIFIC (fndecl) = NULL;
4136 otypes.release ();
4137 oparms.release ();
4140 /* Modify actual arguments of the function call STMT as indicated in
4141 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
4142 otherwise it must contain the corresponding call graph edge. */
4144 void
4145 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4146 ipa_parm_adjustment_vec adjustments)
4148 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4149 vec<tree> vargs;
4150 vec<tree, va_gc> **debug_args = NULL;
4151 gimple new_stmt;
4152 gimple_stmt_iterator gsi, prev_gsi;
4153 tree callee_decl;
4154 int i, len;
4156 len = adjustments.length ();
4157 vargs.create (len);
4158 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4159 current_node->remove_stmt_references (stmt);
4161 gsi = gsi_for_stmt (stmt);
4162 prev_gsi = gsi;
4163 gsi_prev (&prev_gsi);
4164 for (i = 0; i < len; i++)
4166 struct ipa_parm_adjustment *adj;
4168 adj = &adjustments[i];
4170 if (adj->op == IPA_PARM_OP_COPY)
4172 tree arg = gimple_call_arg (stmt, adj->base_index);
4174 vargs.quick_push (arg);
4176 else if (adj->op != IPA_PARM_OP_REMOVE)
4178 tree expr, base, off;
4179 location_t loc;
4180 unsigned int deref_align = 0;
4181 bool deref_base = false;
4183 /* When we create a new parameter out of the value of the old one, we can
4184 do the following kinds of transformations:
4186 - A scalar passed by reference is converted to a scalar passed by
4187 value. (adj->by_ref is false and the type of the original
4188 actual argument is a pointer to a scalar).
4190 - A part of an aggregate is passed instead of the whole aggregate.
4191 The part can be passed either by value or by reference, this is
4192 determined by value of adj->by_ref. Moreover, the code below
4193 handles both situations when the original aggregate is passed by
4194 value (its type is not a pointer) and when it is passed by
4195 reference (it is a pointer to an aggregate).
4197 When the new argument is passed by reference (adj->by_ref is true)
4198 it must be a part of an aggregate and therefore we form it by
4199 simply taking the address of a reference inside the original
4200 aggregate. */
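/* A hypothetical illustration: if the piece at offset 0 of the aggregate
   behind the second actual argument replaces that argument, a call
   foo (i, &s) becomes roughly

     tmp = s.f;
     foo (i, tmp);

   with the MEM_REF built below extracting the piece from the original
   argument and, for by_ref pieces, its address being taken instead.  */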
4202 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4203 base = gimple_call_arg (stmt, adj->base_index);
4204 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4205 : EXPR_LOCATION (base);
4207 if (TREE_CODE (base) != ADDR_EXPR
4208 && POINTER_TYPE_P (TREE_TYPE (base)))
4209 off = build_int_cst (adj->alias_ptr_type,
4210 adj->offset / BITS_PER_UNIT);
4211 else
4213 HOST_WIDE_INT base_offset;
4214 tree prev_base;
4215 bool addrof;
4217 if (TREE_CODE (base) == ADDR_EXPR)
4219 base = TREE_OPERAND (base, 0);
4220 addrof = true;
4222 else
4223 addrof = false;
4224 prev_base = base;
4225 base = get_addr_base_and_unit_offset (base, &base_offset);
4226 /* Aggregate arguments can have non-invariant addresses. */
4227 if (!base)
4229 base = build_fold_addr_expr (prev_base);
4230 off = build_int_cst (adj->alias_ptr_type,
4231 adj->offset / BITS_PER_UNIT);
4233 else if (TREE_CODE (base) == MEM_REF)
4235 if (!addrof)
4237 deref_base = true;
4238 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4240 off = build_int_cst (adj->alias_ptr_type,
4241 base_offset
4242 + adj->offset / BITS_PER_UNIT);
4243 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4244 off);
4245 base = TREE_OPERAND (base, 0);
4247 else
4249 off = build_int_cst (adj->alias_ptr_type,
4250 base_offset
4251 + adj->offset / BITS_PER_UNIT);
4252 base = build_fold_addr_expr (base);
4256 if (!adj->by_ref)
4258 tree type = adj->type;
4259 unsigned int align;
4260 unsigned HOST_WIDE_INT misalign;
4262 if (deref_base)
4264 align = deref_align;
4265 misalign = 0;
4267 else
4269 get_pointer_alignment_1 (base, &align, &misalign);
4270 if (TYPE_ALIGN (type) > align)
4271 align = TYPE_ALIGN (type);
4273 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4274 * BITS_PER_UNIT);
4275 misalign = misalign & (align - 1);
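/* A nonzero residue caps the alignment that can still be guaranteed at
   its least significant set bit: e.g. with align 32 a residue of 24
   (all in bits) leaves only 8 = 24 & -24.  */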
4276 if (misalign != 0)
4277 align = (misalign & -misalign);
4278 if (align < TYPE_ALIGN (type))
4279 type = build_aligned_type (type, align);
4280 base = force_gimple_operand_gsi (&gsi, base,
4281 true, NULL, true, GSI_SAME_STMT);
4282 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4283 /* If expr is not a valid gimple call argument, emit
4284 a load into a temporary. */
4285 if (is_gimple_reg_type (TREE_TYPE (expr)))
4287 gimple tem = gimple_build_assign (NULL_TREE, expr);
4288 if (gimple_in_ssa_p (cfun))
4290 gimple_set_vuse (tem, gimple_vuse (stmt));
4291 expr = make_ssa_name (TREE_TYPE (expr), tem);
4293 else
4294 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4295 gimple_assign_set_lhs (tem, expr);
4296 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4299 else
4301 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4302 expr = build_fold_addr_expr (expr);
4303 expr = force_gimple_operand_gsi (&gsi, expr,
4304 true, NULL, true, GSI_SAME_STMT);
4306 vargs.quick_push (expr);
4308 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4310 unsigned int ix;
4311 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4312 gimple def_temp;
4314 arg = gimple_call_arg (stmt, adj->base_index);
4315 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4317 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4318 continue;
4319 arg = fold_convert_loc (gimple_location (stmt),
4320 TREE_TYPE (origin), arg);
4322 if (debug_args == NULL)
4323 debug_args = decl_debug_args_insert (callee_decl);
4324 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4325 if (ddecl == origin)
4327 ddecl = (**debug_args)[ix + 1];
4328 break;
4330 if (ddecl == NULL)
4332 ddecl = make_node (DEBUG_EXPR_DECL);
4333 DECL_ARTIFICIAL (ddecl) = 1;
4334 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4335 DECL_MODE (ddecl) = DECL_MODE (origin);
4337 vec_safe_push (*debug_args, origin);
4338 vec_safe_push (*debug_args, ddecl);
4340 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4341 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4345 if (dump_file && (dump_flags & TDF_DETAILS))
4347 fprintf (dump_file, "replacing stmt:");
4348 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4351 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4352 vargs.release ();
4353 if (gimple_call_lhs (stmt))
4354 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4356 gimple_set_block (new_stmt, gimple_block (stmt));
4357 if (gimple_has_location (stmt))
4358 gimple_set_location (new_stmt, gimple_location (stmt));
4359 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4360 gimple_call_copy_flags (new_stmt, stmt);
4361 if (gimple_in_ssa_p (cfun))
4363 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4364 if (gimple_vdef (stmt))
4366 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4367 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4371 if (dump_file && (dump_flags & TDF_DETAILS))
4373 fprintf (dump_file, "with stmt:");
4374 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4375 fprintf (dump_file, "\n");
4377 gsi_replace (&gsi, new_stmt, true);
4378 if (cs)
4379 cs->set_call_stmt (new_stmt);
4382 current_node->record_stmt_references (gsi_stmt (gsi));
4383 gsi_prev (&gsi);
4385 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4388 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4389 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4390 specifies whether the function should care about type incompatibility between the
4391 current and new expressions. If it is false, the function will leave
4392 incompatibility issues to the caller. Return true iff the expression
4393 was modified. */
4395 bool
4396 ipa_modify_expr (tree *expr, bool convert,
4397 ipa_parm_adjustment_vec adjustments)
4399 struct ipa_parm_adjustment *cand
4400 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4401 if (!cand)
4402 return false;
4404 tree src;
4405 if (cand->by_ref)
4406 src = build_simple_mem_ref (cand->new_decl);
4407 else
4408 src = cand->new_decl;
4410 if (dump_file && (dump_flags & TDF_DETAILS))
4412 fprintf (dump_file, "About to replace expr ");
4413 print_generic_expr (dump_file, *expr, 0);
4414 fprintf (dump_file, " with ");
4415 print_generic_expr (dump_file, src, 0);
4416 fprintf (dump_file, "\n");
4419 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4421 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4422 *expr = vce;
4424 else
4425 *expr = src;
4426 return true;
4429 /* If T is an SSA_NAME, return NULL if it is not a default def or
4430 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4431 the base variable is always returned, regardless of whether it is a
4432 default def. Return T if it is not an SSA_NAME. */
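/* Using illustrative names: for a_1(D), the default definition of
   PARM_DECL a, this returns a; for an SSA name defined by a statement it
   returns NULL_TREE unless IGNORE_DEFAULT_DEF is true.  */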
4434 static tree
4435 get_ssa_base_param (tree t, bool ignore_default_def)
4437 if (TREE_CODE (t) == SSA_NAME)
4439 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4440 return SSA_NAME_VAR (t);
4441 else
4442 return NULL_TREE;
4444 return t;
4447 /* Given an expression, return an adjustment entry specifying the
4448 transformation to be done on EXPR. If no suitable adjustment entry
4449 is found, return NULL.
4451 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4452 default def, otherwise bail on them.
4454 If CONVERT is non-NULL, this function will set *CONVERT if the
4455 expression provided is a component reference. ADJUSTMENTS is the
4456 adjustments vector. */
4458 ipa_parm_adjustment *
4459 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4460 ipa_parm_adjustment_vec adjustments,
4461 bool ignore_default_def)
4463 if (TREE_CODE (**expr) == BIT_FIELD_REF
4464 || TREE_CODE (**expr) == IMAGPART_EXPR
4465 || TREE_CODE (**expr) == REALPART_EXPR)
4467 *expr = &TREE_OPERAND (**expr, 0);
4468 if (convert)
4469 *convert = true;
4472 HOST_WIDE_INT offset, size, max_size;
4473 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4474 if (!base || size == -1 || max_size == -1)
4475 return NULL;
4477 if (TREE_CODE (base) == MEM_REF)
4479 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4480 base = TREE_OPERAND (base, 0);
4483 base = get_ssa_base_param (base, ignore_default_def);
4484 if (!base || TREE_CODE (base) != PARM_DECL)
4485 return NULL;
4487 struct ipa_parm_adjustment *cand = NULL;
4488 unsigned int len = adjustments.length ();
4489 for (unsigned i = 0; i < len; i++)
4491 struct ipa_parm_adjustment *adj = &adjustments[i];
4493 if (adj->base == base
4494 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4496 cand = adj;
4497 break;
4501 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4502 return NULL;
4503 return cand;
4506 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4508 static bool
4509 index_in_adjustments_multiple_times_p (int base_index,
4510 ipa_parm_adjustment_vec adjustments)
4512 int i, len = adjustments.length ();
4513 bool one = false;
4515 for (i = 0; i < len; i++)
4517 struct ipa_parm_adjustment *adj;
4518 adj = &adjustments[i];
4520 if (adj->base_index == base_index)
4522 if (one)
4523 return true;
4524 else
4525 one = true;
4528 return false;
4532 /* Return adjustments that should have the same effect on function parameters
4533 and call arguments as if they were first changed according to adjustments in
4534 INNER and then by adjustments in OUTER. */
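/* A hypothetical example: if INNER turns (a, b) into (a) by removing
   parameter 1 and OUTER merely copies the remaining parameter, the
   combined vector copies parameter 0 of the original signature and still
   records the removal of parameter 1, so one application of it equals the
   two-step sequence.  */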
4536 ipa_parm_adjustment_vec
4537 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4538 ipa_parm_adjustment_vec outer)
4540 int i, outlen = outer.length ();
4541 int inlen = inner.length ();
4542 int removals = 0;
4543 ipa_parm_adjustment_vec adjustments, tmp;
4545 tmp.create (inlen);
4546 for (i = 0; i < inlen; i++)
4548 struct ipa_parm_adjustment *n;
4549 n = &inner[i];
4551 if (n->op == IPA_PARM_OP_REMOVE)
4552 removals++;
4553 else
4555 /* FIXME: Handling of new arguments is not implemented yet. */
4556 gcc_assert (n->op != IPA_PARM_OP_NEW);
4557 tmp.quick_push (*n);
4561 adjustments.create (outlen + removals);
4562 for (i = 0; i < outlen; i++)
4564 struct ipa_parm_adjustment r;
4565 struct ipa_parm_adjustment *out = &outer[i];
4566 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4568 memset (&r, 0, sizeof (r));
4569 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4570 if (out->op == IPA_PARM_OP_REMOVE)
4572 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4574 r.op = IPA_PARM_OP_REMOVE;
4575 adjustments.quick_push (r);
4577 continue;
4579 else
4581 /* FIXME: Handling of new arguments is not implemented yet. */
4582 gcc_assert (out->op != IPA_PARM_OP_NEW);
4585 r.base_index = in->base_index;
4586 r.type = out->type;
4588 /* FIXME: Create nonlocal value too. */
4590 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4591 r.op = IPA_PARM_OP_COPY;
4592 else if (in->op == IPA_PARM_OP_COPY)
4593 r.offset = out->offset;
4594 else if (out->op == IPA_PARM_OP_COPY)
4595 r.offset = in->offset;
4596 else
4597 r.offset = in->offset + out->offset;
4598 adjustments.quick_push (r);
4601 for (i = 0; i < inlen; i++)
4603 struct ipa_parm_adjustment *n = &inner[i];
4605 if (n->op == IPA_PARM_OP_REMOVE)
4606 adjustments.quick_push (*n);
4609 tmp.release ();
4610 return adjustments;
4613 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4614 human-friendly way, assuming they are meant to be applied to FNDECL. */
4616 void
4617 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4618 tree fndecl)
4620 int i, len = adjustments.length ();
4621 bool first = true;
4622 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4624 fprintf (file, "IPA param adjustments: ");
4625 for (i = 0; i < len; i++)
4627 struct ipa_parm_adjustment *adj;
4628 adj = &adjustments[i];
4630 if (!first)
4631 fprintf (file, " ");
4632 else
4633 first = false;
4635 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4636 print_generic_expr (file, parms[adj->base_index], 0);
4637 if (adj->base)
4639 fprintf (file, ", base: ");
4640 print_generic_expr (file, adj->base, 0);
4642 if (adj->new_decl)
4644 fprintf (file, ", new_decl: ");
4645 print_generic_expr (file, adj->new_decl, 0);
4647 if (adj->new_ssa_base)
4649 fprintf (file, ", new_ssa_base: ");
4650 print_generic_expr (file, adj->new_ssa_base, 0);
4653 if (adj->op == IPA_PARM_OP_COPY)
4654 fprintf (file, ", copy_param");
4655 else if (adj->op == IPA_PARM_OP_REMOVE)
4656 fprintf (file, ", remove_param");
4657 else
4658 fprintf (file, ", offset %li", (long) adj->offset);
4659 if (adj->by_ref)
4660 fprintf (file, ", by_ref");
4661 print_node_brief (file, ", type: ", adj->type, 0);
4662 fprintf (file, "\n");
4664 parms.release ();
4667 /* Dump the linked list of aggregate replacement values AV to F. */
4669 void
4670 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4672 bool comma = false;
4673 fprintf (f, " Aggregate replacements:");
4674 for (; av; av = av->next)
4676 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4677 av->index, av->offset);
4678 print_generic_expr (f, av->value, 0);
4679 comma = true;
4681 fprintf (f, "\n");
4684 /* Stream out jump function JUMP_FUNC to OB. */
4686 static void
4687 ipa_write_jump_function (struct output_block *ob,
4688 struct ipa_jump_func *jump_func)
4690 struct ipa_agg_jf_item *item;
4691 struct bitpack_d bp;
4692 int i, count;
4694 streamer_write_uhwi (ob, jump_func->type);
4695 switch (jump_func->type)
4697 case IPA_JF_UNKNOWN:
4698 break;
4699 case IPA_JF_KNOWN_TYPE:
4700 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4701 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4702 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4703 break;
4704 case IPA_JF_CONST:
4705 gcc_assert (
4706 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4707 stream_write_tree (ob, jump_func->value.constant.value, true);
4708 break;
4709 case IPA_JF_PASS_THROUGH:
4710 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4711 if (jump_func->value.pass_through.operation == NOP_EXPR)
4713 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4714 bp = bitpack_create (ob->main_stream);
4715 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4716 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4717 streamer_write_bitpack (&bp);
4719 else
4721 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4722 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4724 break;
4725 case IPA_JF_ANCESTOR:
4726 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4727 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4728 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4729 bp = bitpack_create (ob->main_stream);
4730 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4731 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4732 streamer_write_bitpack (&bp);
4733 break;
4736 count = vec_safe_length (jump_func->agg.items);
4737 streamer_write_uhwi (ob, count);
4738 if (count)
4740 bp = bitpack_create (ob->main_stream);
4741 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4742 streamer_write_bitpack (&bp);
4745 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4747 streamer_write_uhwi (ob, item->offset);
4748 stream_write_tree (ob, item->value, true);
4752 /* Read in jump function JUMP_FUNC from IB. */
4754 static void
4755 ipa_read_jump_function (struct lto_input_block *ib,
4756 struct ipa_jump_func *jump_func,
4757 struct cgraph_edge *cs,
4758 struct data_in *data_in)
4760 enum jump_func_type jftype;
4761 enum tree_code operation;
4762 int i, count;
4764 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4765 switch (jftype)
4767 case IPA_JF_UNKNOWN:
4768 jump_func->type = IPA_JF_UNKNOWN;
4769 break;
4770 case IPA_JF_KNOWN_TYPE:
4772 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4773 tree base_type = stream_read_tree (ib, data_in);
4774 tree component_type = stream_read_tree (ib, data_in);
4776 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4777 break;
4779 case IPA_JF_CONST:
4780 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4781 break;
4782 case IPA_JF_PASS_THROUGH:
4783 operation = (enum tree_code) streamer_read_uhwi (ib);
4784 if (operation == NOP_EXPR)
4786 int formal_id = streamer_read_uhwi (ib);
4787 struct bitpack_d bp = streamer_read_bitpack (ib);
4788 bool agg_preserved = bp_unpack_value (&bp, 1);
4789 bool type_preserved = bp_unpack_value (&bp, 1);
4790 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4791 type_preserved);
4793 else
4795 tree operand = stream_read_tree (ib, data_in);
4796 int formal_id = streamer_read_uhwi (ib);
4797 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4798 operation);
4800 break;
4801 case IPA_JF_ANCESTOR:
4803 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4804 tree type = stream_read_tree (ib, data_in);
4805 int formal_id = streamer_read_uhwi (ib);
4806 struct bitpack_d bp = streamer_read_bitpack (ib);
4807 bool agg_preserved = bp_unpack_value (&bp, 1);
4808 bool type_preserved = bp_unpack_value (&bp, 1);
4810 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4811 type_preserved);
4812 break;
4816 count = streamer_read_uhwi (ib);
4817 vec_alloc (jump_func->agg.items, count);
4818 if (count)
4820 struct bitpack_d bp = streamer_read_bitpack (ib);
4821 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4823 for (i = 0; i < count; i++)
4825 struct ipa_agg_jf_item item;
4826 item.offset = streamer_read_uhwi (ib);
4827 item.value = stream_read_tree (ib, data_in);
4828 jump_func->agg.items->quick_push (item);
4832 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4833 relevant to indirect inlining to OB. */
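/* The bitpack written below carries, in this order, the polymorphic,
   agg_contents, member_ptr, by_ref and vptr_changed flags; the offset is
   streamed only when agg_contents or polymorphic is set and the
   polymorphic data only when polymorphic is.  ipa_read_indirect_edge_info
   unpacks everything in the same order.  */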
4835 static void
4836 ipa_write_indirect_edge_info (struct output_block *ob,
4837 struct cgraph_edge *cs)
4839 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4840 struct bitpack_d bp;
4842 streamer_write_hwi (ob, ii->param_index);
4843 bp = bitpack_create (ob->main_stream);
4844 bp_pack_value (&bp, ii->polymorphic, 1);
4845 bp_pack_value (&bp, ii->agg_contents, 1);
4846 bp_pack_value (&bp, ii->member_ptr, 1);
4847 bp_pack_value (&bp, ii->by_ref, 1);
4848 bp_pack_value (&bp, ii->vptr_changed, 1);
4849 streamer_write_bitpack (&bp);
4850 if (ii->agg_contents || ii->polymorphic)
4851 streamer_write_hwi (ob, ii->offset);
4852 else
4853 gcc_assert (ii->offset == 0);
4855 if (ii->polymorphic)
4857 streamer_write_hwi (ob, ii->otr_token);
4858 stream_write_tree (ob, ii->otr_type, true);
4859 ii->context.stream_out (ob);
4863 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4864 relevant to indirect inlining from IB. */
4866 static void
4867 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4868 struct data_in *data_in,
4869 struct cgraph_edge *cs)
4871 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4872 struct bitpack_d bp;
4874 ii->param_index = (int) streamer_read_hwi (ib);
4875 bp = streamer_read_bitpack (ib);
4876 ii->polymorphic = bp_unpack_value (&bp, 1);
4877 ii->agg_contents = bp_unpack_value (&bp, 1);
4878 ii->member_ptr = bp_unpack_value (&bp, 1);
4879 ii->by_ref = bp_unpack_value (&bp, 1);
4880 ii->vptr_changed = bp_unpack_value (&bp, 1);
4881 if (ii->agg_contents || ii->polymorphic)
4882 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4883 else
4884 ii->offset = 0;
4885 if (ii->polymorphic)
4887 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4888 ii->otr_type = stream_read_tree (ib, data_in);
4889 ii->context.stream_in (ib, data_in);
4893 /* Stream out NODE info to OB. */
4895 static void
4896 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4898 int node_ref;
4899 lto_symtab_encoder_t encoder;
4900 struct ipa_node_params *info = IPA_NODE_REF (node);
4901 int j;
4902 struct cgraph_edge *e;
4903 struct bitpack_d bp;
4905 encoder = ob->decl_state->symtab_node_encoder;
4906 node_ref = lto_symtab_encoder_encode (encoder, node);
4907 streamer_write_uhwi (ob, node_ref);
4909 streamer_write_uhwi (ob, ipa_get_param_count (info));
4910 for (j = 0; j < ipa_get_param_count (info); j++)
4911 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4912 bp = bitpack_create (ob->main_stream);
4913 gcc_assert (info->analysis_done
4914 || ipa_get_param_count (info) == 0);
4915 gcc_assert (!info->node_enqueued);
4916 gcc_assert (!info->ipcp_orig_node);
4917 for (j = 0; j < ipa_get_param_count (info); j++)
4918 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4919 streamer_write_bitpack (&bp);
4920 for (j = 0; j < ipa_get_param_count (info); j++)
4921 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4922 for (e = node->callees; e; e = e->next_callee)
4924 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4926 streamer_write_uhwi (ob,
4927 ipa_get_cs_argument_count (args) * 2
4928 + (args->polymorphic_call_contexts != NULL));
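/* The count streamed above encodes two values at once: its low bit says
   whether a polymorphic call context follows each jump function and the
   rest is the argument count, so e.g. three arguments with contexts are
   written as 7.  ipa_read_node_info recovers the pair with count & 1 and
   count / 2.  */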
4929 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4931 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4932 if (args->polymorphic_call_contexts != NULL)
4933 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4936 for (e = node->indirect_calls; e; e = e->next_callee)
4938 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4940 streamer_write_uhwi (ob,
4941 ipa_get_cs_argument_count (args) * 2
4942 + (args->polymorphic_call_contexts != NULL));
4943 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4945 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4946 if (args->polymorphic_call_contexts != NULL)
4947 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4949 ipa_write_indirect_edge_info (ob, e);
4953 /* Stream in NODE info from IB. */
4955 static void
4956 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4957 struct data_in *data_in)
4959 struct ipa_node_params *info = IPA_NODE_REF (node);
4960 int k;
4961 struct cgraph_edge *e;
4962 struct bitpack_d bp;
4964 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4966 for (k = 0; k < ipa_get_param_count (info); k++)
4967 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4969 bp = streamer_read_bitpack (ib);
4970 if (ipa_get_param_count (info) != 0)
4971 info->analysis_done = true;
4972 info->node_enqueued = false;
4973 for (k = 0; k < ipa_get_param_count (info); k++)
4974 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4975 for (k = 0; k < ipa_get_param_count (info); k++)
4976 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4977 for (e = node->callees; e; e = e->next_callee)
4979 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4980 int count = streamer_read_uhwi (ib);
4981 bool contexts_computed = count & 1;
4982 count /= 2;
4984 if (!count)
4985 continue;
4986 vec_safe_grow_cleared (args->jump_functions, count);
4987 if (contexts_computed)
4988 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4990 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4992 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4993 data_in);
4994 if (contexts_computed)
4995 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4998 for (e = node->indirect_calls; e; e = e->next_callee)
5000 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5001 int count = streamer_read_uhwi (ib);
5002 bool contexts_computed = count & 1;
5003 count /= 2;
5005 if (count)
5007 vec_safe_grow_cleared (args->jump_functions, count);
5008 if (contexts_computed)
5009 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5010 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5012 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5013 data_in);
5014 if (contexts_computed)
5015 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5018 ipa_read_indirect_edge_info (ib, data_in, e);
5022 /* Write jump functions for the nodes in the current LTO partition. */
5024 void
5025 ipa_prop_write_jump_functions (void)
5027 struct cgraph_node *node;
5028 struct output_block *ob;
5029 unsigned int count = 0;
5030 lto_symtab_encoder_iterator lsei;
5031 lto_symtab_encoder_t encoder;
5034 if (!ipa_node_params_vector.exists ())
5035 return;
5037 ob = create_output_block (LTO_section_jump_functions);
5038 encoder = ob->decl_state->symtab_node_encoder;
5039 ob->symbol = NULL;
5040 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5041 lsei_next_function_in_partition (&lsei))
5043 node = lsei_cgraph_node (lsei);
5044 if (node->has_gimple_body_p ()
5045 && IPA_NODE_REF (node) != NULL)
5046 count++;
5049 streamer_write_uhwi (ob, count);
5051 /* Process all of the functions. */
5052 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5053 lsei_next_function_in_partition (&lsei))
5055 node = lsei_cgraph_node (lsei);
5056 if (node->has_gimple_body_p ()
5057 && IPA_NODE_REF (node) != NULL)
5058 ipa_write_node_info (ob, node);
5060 streamer_write_char_stream (ob->main_stream, 0);
5061 produce_asm (ob, NULL);
5062 destroy_output_block (ob);
5065 /* Read section in file FILE_DATA of length LEN with data DATA. */
5067 static void
5068 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5069 size_t len)
5071 const struct lto_function_header *header =
5072 (const struct lto_function_header *) data;
5073 const int cfg_offset = sizeof (struct lto_function_header);
5074 const int main_offset = cfg_offset + header->cfg_size;
5075 const int string_offset = main_offset + header->main_size;
5076 struct data_in *data_in;
5077 unsigned int i;
5078 unsigned int count;
5080 lto_input_block ib_main ((const char *) data + main_offset,
5081 header->main_size);
5083 data_in =
5084 lto_data_in_create (file_data, (const char *) data + string_offset,
5085 header->string_size, vNULL);
5086 count = streamer_read_uhwi (&ib_main);
5088 for (i = 0; i < count; i++)
5090 unsigned int index;
5091 struct cgraph_node *node;
5092 lto_symtab_encoder_t encoder;
5094 index = streamer_read_uhwi (&ib_main);
5095 encoder = file_data->symtab_node_encoder;
5096 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5097 index));
5098 gcc_assert (node->definition);
5099 ipa_read_node_info (&ib_main, node, data_in);
5101 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5102 len);
5103 lto_data_in_delete (data_in);
5106 /* Read ipcp jump functions. */
5108 void
5109 ipa_prop_read_jump_functions (void)
5111 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5112 struct lto_file_decl_data *file_data;
5113 unsigned int j = 0;
5115 ipa_check_create_node_params ();
5116 ipa_check_create_edge_args ();
5117 ipa_register_cgraph_hooks ();
5119 while ((file_data = file_data_vec[j++]))
5121 size_t len;
5122 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5124 if (data)
5125 ipa_prop_read_section (file_data, data, len);
5129 /* After merging units, we can get mismatches in argument counts.
5130 Decl merging might also have rendered parameter lists obsolete.
5131 Also compute called_with_variable_arg info. */
5133 void
5134 ipa_update_after_lto_read (void)
5136 ipa_check_create_node_params ();
5137 ipa_check_create_edge_args ();
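/* Write the aggregate replacement chain of NODE to OB.  */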
5140 void
5141 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5143 int node_ref;
5144 unsigned int count = 0;
5145 lto_symtab_encoder_t encoder;
5146 struct ipa_agg_replacement_value *aggvals, *av;
5148 aggvals = ipa_get_agg_replacements_for_node (node);
5149 encoder = ob->decl_state->symtab_node_encoder;
5150 node_ref = lto_symtab_encoder_encode (encoder, node);
5151 streamer_write_uhwi (ob, node_ref);
5153 for (av = aggvals; av; av = av->next)
5154 count++;
5155 streamer_write_uhwi (ob, count);
5157 for (av = aggvals; av; av = av->next)
5159 struct bitpack_d bp;
5161 streamer_write_uhwi (ob, av->offset);
5162 streamer_write_uhwi (ob, av->index);
5163 stream_write_tree (ob, av->value, true);
5165 bp = bitpack_create (ob->main_stream);
5166 bp_pack_value (&bp, av->by_ref, 1);
5167 streamer_write_bitpack (&bp);
5171 /* Stream in the aggregate value replacement chain for NODE from IB. */
5173 static void
5174 read_agg_replacement_chain (struct lto_input_block *ib,
5175 struct cgraph_node *node,
5176 struct data_in *data_in)
5178 struct ipa_agg_replacement_value *aggvals = NULL;
5179 unsigned int count, i;
5181 count = streamer_read_uhwi (ib);
5182 for (i = 0; i < count; i++)
5184 struct ipa_agg_replacement_value *av;
5185 struct bitpack_d bp;
5187 av = ggc_alloc<ipa_agg_replacement_value> ();
5188 av->offset = streamer_read_uhwi (ib);
5189 av->index = streamer_read_uhwi (ib);
5190 av->value = stream_read_tree (ib, data_in);
5191 bp = streamer_read_bitpack (ib);
5192 av->by_ref = bp_unpack_value (&bp, 1);
5193 av->next = aggvals;
5194 aggvals = av;
5196 ipa_set_node_agg_value_chain (node, aggvals);
5199 /* Write all aggregate replacements for nodes in the set. */
5201 void
5202 ipa_prop_write_all_agg_replacement (void)
5204 struct cgraph_node *node;
5205 struct output_block *ob;
5206 unsigned int count = 0;
5207 lto_symtab_encoder_iterator lsei;
5208 lto_symtab_encoder_t encoder;
5210 if (!ipa_node_agg_replacements)
5211 return;
5213 ob = create_output_block (LTO_section_ipcp_transform);
5214 encoder = ob->decl_state->symtab_node_encoder;
5215 ob->symbol = NULL;
5216 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5217 lsei_next_function_in_partition (&lsei))
5219 node = lsei_cgraph_node (lsei);
5220 if (node->has_gimple_body_p ()
5221 && ipa_get_agg_replacements_for_node (node) != NULL)
5222 count++;
5225 streamer_write_uhwi (ob, count);
5227 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5228 lsei_next_function_in_partition (&lsei))
5230 node = lsei_cgraph_node (lsei);
5231 if (node->has_gimple_body_p ()
5232 && ipa_get_agg_replacements_for_node (node) != NULL)
5233 write_agg_replacement_chain (ob, node);
5235 streamer_write_char_stream (ob->main_stream, 0);
5236 produce_asm (ob, NULL);
5237 destroy_output_block (ob);
5240 /* Read replacements section in file FILE_DATA of length LEN with data
5241 DATA. */
5243 static void
5244 read_replacements_section (struct lto_file_decl_data *file_data,
5245 const char *data,
5246 size_t len)
5248 const struct lto_function_header *header =
5249 (const struct lto_function_header *) data;
5250 const int cfg_offset = sizeof (struct lto_function_header);
5251 const int main_offset = cfg_offset + header->cfg_size;
5252 const int string_offset = main_offset + header->main_size;
5253 struct data_in *data_in;
5254 unsigned int i;
5255 unsigned int count;
5257 lto_input_block ib_main ((const char *) data + main_offset,
5258 header->main_size);
5260 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5261 header->string_size, vNULL);
5262 count = streamer_read_uhwi (&ib_main);
5264 for (i = 0; i < count; i++)
5266 unsigned int index;
5267 struct cgraph_node *node;
5268 lto_symtab_encoder_t encoder;
5270 index = streamer_read_uhwi (&ib_main);
5271 encoder = file_data->symtab_node_encoder;
5272 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5273 index));
5274 gcc_assert (node->definition);
5275 read_agg_replacement_chain (&ib_main, node, data_in);
5277 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5278 len);
5279 lto_data_in_delete (data_in);
5282 /* Read IPA-CP aggregate replacements. */
5284 void
5285 ipa_prop_read_all_agg_replacement (void)
5287 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5288 struct lto_file_decl_data *file_data;
5289 unsigned int j = 0;
5291 while ((file_data = file_data_vec[j++]))
5293 size_t len;
5294 const char *data = lto_get_section_data (file_data,
5295 LTO_section_ipcp_transform,
5296 NULL, &len);
5297 if (data)
5298 read_replacements_section (file_data, data, len);
5302 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5303 NODE. */
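/* For instance, if NODE skips its second parameter (index 1), the remap
   table built below sends the surviving indices 0, 2, 3 to 0, 1, 2.  */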
5305 static void
5306 adjust_agg_replacement_values (struct cgraph_node *node,
5307 struct ipa_agg_replacement_value *aggval)
5309 struct ipa_agg_replacement_value *v;
5310 int i, c = 0, d = 0, *adj;
5312 if (!node->clone.combined_args_to_skip)
5313 return;
5315 for (v = aggval; v; v = v->next)
5317 gcc_assert (v->index >= 0);
5318 if (c < v->index)
5319 c = v->index;
5321 c++;
5323 adj = XALLOCAVEC (int, c);
5324 for (i = 0; i < c; i++)
5325 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5327 adj[i] = -1;
5328 d++;
5330 else
5331 adj[i] = i - d;
5333 for (v = aggval; v; v = v->next)
5334 v->index = adj[v->index];
5337 /* Dominator walker driving the ipcp modification phase. */
5339 class ipcp_modif_dom_walker : public dom_walker
5341 public:
5342 ipcp_modif_dom_walker (struct func_body_info *fbi,
5343 vec<ipa_param_descriptor> descs,
5344 struct ipa_agg_replacement_value *av,
5345 bool *sc, bool *cc)
5346 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5347 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5349 virtual void before_dom_children (basic_block);
5351 private:
5352 struct func_body_info *m_fbi;
5353 vec<ipa_param_descriptor> m_descriptors;
5354 struct ipa_agg_replacement_value *m_aggval;
5355 bool *m_something_changed, *m_cfg_changed;
5358 void
5359 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5361 gimple_stmt_iterator gsi;
5362 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5364 struct ipa_agg_replacement_value *v;
5365 gimple stmt = gsi_stmt (gsi);
5366 tree rhs, val, t;
5367 HOST_WIDE_INT offset, size;
5368 int index;
5369 bool by_ref, vce;
5371 if (!gimple_assign_load_p (stmt))
5372 continue;
5373 rhs = gimple_assign_rhs1 (stmt);
5374 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5375 continue;
5377 vce = false;
5378 t = rhs;
5379 while (handled_component_p (t))
5381 /* V_C_E can do things like convert an array of integers to one
5382 bigger integer and similar things we do not handle below. */
5383 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5385 vce = true;
5386 break;
5388 t = TREE_OPERAND (t, 0);
5390 if (vce)
5391 continue;
5393 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5394 &offset, &size, &by_ref))
5395 continue;
5396 for (v = m_aggval; v; v = v->next)
5397 if (v->index == index
5398 && v->offset == offset)
5399 break;
5400 if (!v
5401 || v->by_ref != by_ref
5402 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5403 continue;
5405 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5406 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5408 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5409 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5410 else if (TYPE_SIZE (TREE_TYPE (rhs))
5411 == TYPE_SIZE (TREE_TYPE (v->value)))
5412 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5413 else
5415 if (dump_file)
5417 fprintf (dump_file, " const ");
5418 print_generic_expr (dump_file, v->value, 0);
5419 fprintf (dump_file, " can't be converted to type of ");
5420 print_generic_expr (dump_file, rhs, 0);
5421 fprintf (dump_file, "\n");
5423 continue;
5426 else
5427 val = v->value;
5429 if (dump_file && (dump_flags & TDF_DETAILS))
5431 fprintf (dump_file, "Modifying stmt:\n ");
5432 print_gimple_stmt (dump_file, stmt, 0, 0);
5434 gimple_assign_set_rhs_from_tree (&gsi, val);
5435 update_stmt (stmt);
5437 if (dump_file && (dump_flags & TDF_DETAILS))
5439 fprintf (dump_file, "into:\n ");
5440 print_gimple_stmt (dump_file, stmt, 0, 0);
5441 fprintf (dump_file, "\n");
5444 *m_something_changed = true;
5445 if (maybe_clean_eh_stmt (stmt)
5446 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5447 *m_cfg_changed = true;
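/* An illustrative example of the rewrite done above: given a recorded
   replacement 0[OFS]=42 for the first parameter, a load x_1 = p_2(D)->f
   whose field f sits at offset OFS is turned into x_1 = 42 (names and
   offset hypothetical).  */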
5452 /* IPCP transformation phase doing propagation of aggregate values. */
5454 unsigned int
5455 ipcp_transform_function (struct cgraph_node *node)
5457 vec<ipa_param_descriptor> descriptors = vNULL;
5458 struct func_body_info fbi;
5459 struct ipa_agg_replacement_value *aggval;
5460 int param_count;
5461 bool cfg_changed = false, something_changed = false;
5463 gcc_checking_assert (cfun);
5464 gcc_checking_assert (current_function_decl);
5466 if (dump_file)
5467 fprintf (dump_file, "Modification phase of node %s/%i\n",
5468 node->name (), node->order);
5470 aggval = ipa_get_agg_replacements_for_node (node);
5471 if (!aggval)
5472 return 0;
5473 param_count = count_formal_params (node->decl);
5474 if (param_count == 0)
5475 return 0;
5476 adjust_agg_replacement_values (node, aggval);
5477 if (dump_file)
5478 ipa_dump_agg_replacement_values (dump_file, aggval);
5480 fbi.node = node;
5481 fbi.info = NULL;
5482 fbi.bb_infos = vNULL;
5483 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5484 fbi.param_count = param_count;
5485 fbi.aa_walked = 0;
5487 descriptors.safe_grow_cleared (param_count);
5488 ipa_populate_param_decls (node, descriptors);
5489 calculate_dominance_info (CDI_DOMINATORS);
5490 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5491 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5493 int i;
5494 struct ipa_bb_info *bi;
5495 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5496 free_ipa_bb_info (bi);
5497 fbi.bb_infos.release ();
5498 free_dominance_info (CDI_DOMINATORS);
5499 (*ipa_node_agg_replacements)[node->uid] = NULL;
5500 descriptors.release ();
5502 if (!something_changed)
5503 return 0;
5504 else if (cfg_changed)
5505 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5506 else
5507 return TODO_update_ssa_only_virtuals;