/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements walked so far while analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
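/* For illustration, a function carrying a per-function optimization override
   such as the (hypothetical) one below makes the predicate above return true,
   because its x_optimize is zero:

     __attribute__ ((optimize ("O0")))
     void no_ipa_for_me (void) { }

   IPA-CP then skips such a node instead of propagating into it.  */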
/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, " Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }

      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
        ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
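/* For illustration, the function above produces dump lines of roughly the
   following shape (made-up values; a pass-through of formal 0 and a constant
   second argument are assumed):

     param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
     param 1: CONST: 42

   The exact strings come from the fprintf calls above.  */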
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, " callsite %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, " indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc),
                                              32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
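/* For illustration, a sketch (pseudo-code, not from any sources) of the
   constructor shape the comment above assumes, with the stores the backward
   walk is interested in:

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     B::B ()
     {
       A::A (this);            // 1) ancestor constructors run first
       this->_vptr = &_ZTV1B;  // 2) then all VMT pointers are rewritten
       // 3) only now member ctors and user code run, which may call virtuals
     }

   A backward walk starting from a statement in section 3 may therefore skip
   over calls until it reaches the section 2 vtable stores.  */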
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
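/* For illustration, a hypothetical caller where the predicate above must
   stay conservative: once a destructor and a placement new have been inlined
   into f, the dynamic type of *b legitimately changes between function entry
   and the call to g, so true (type may change) is the only safe answer:

     void f (B *b)
     {
       b->~B ();       // inlined dtor rewrites the vtable pointer
       new (b) C ();   // placement new installs a different type
       g (b);
     }  */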
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple_call call,
                                       struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple_call call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple_call call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
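/* For illustration, for an SSA pointer p_1 the function above builds the
   dereference MEM[(void *) p_1 + 0] (offset zero) and passes it to
   detect_type_change_from_memory_writes as both ARG and BASE, so the vdef
   walk looks for vtable stores into the object p_1 points to.  */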
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get the AA status structure for the given BB and parameter with INDEX.
   Allocate structures and/or initialize the result with a dominating
   description as necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
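/* For illustration, in a dominator chain like

     BB2  (parameter marked modified here)
      |
     BB3
      |
     BB4  (query here)

   a first query in BB4 copies the valid status of the nearest dominator that
   has one (BB2), so the "modified" verdict is reused without consulting the
   alias oracle again in BB3 or BB4.  */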
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
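/* For illustration, given a body like

     foo (int a)
     {
       a.0_2 = a;
       bar (a.0_2);
     }

   the walk over the virtual definitions reaching the load a.0_2 = a decides
   whether the parameter a may have been written in this function before that
   statement; if not, the loaded value is known to still be the passed one.  */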
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                          gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);
     }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

     foo (int a)
     {
       int D.2064;

       D.2064_4 = a.1(D) + 4;
       bar (D.2064_4);
     }

   This case can also occur in combination with the previous one, e.g.:

     foo (int a, int z)
     {
       int a.0;
       int D.2064;

       a.0_3 = a;
       D.2064_4 = a.0_3 + 4;
       foo (D.2064_4);
     }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters across the different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple_call call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple_call call, gimple_phi phi,
                                    tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple_call call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  /* The second field must have an integral type (the this-adjustment delta);
     test the field's type, not the FIELD_DECL itself.  */
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
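/* For illustration, the member pointer representation the predicate above
   matches looks roughly like this (a sketch, not an actual GCC structure):

     struct member_fn_ptr
     {
       void (T::*ptr) ();  // METHOD_TYPE pointer: the *method_ptr field
       ptrdiff_t adj;      // integral this-adjustment: the *delta field
     };  */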
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
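/* For illustration, given a copy chain like (schematically)

     tmp_1 = cst;
     tmp2_2 = tmp_1;
     tmp3_3 = tmp2_2;

   get_ssa_def_if_simple_copy follows the single-rhs definitions back through
   tmp2_2 and tmp_1 and returns the constant cst.  */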
/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   an element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
/* Build an aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
1677 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1678 in ARG is filled in with constant values. ARG can either be an aggregate
1679 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1680 aggregate. JFUNC is the jump function into which the constants are
1681 subsequently stored. */
1683 static void
1684 determine_locally_known_aggregate_parts (gimple_call call, tree arg,
1685 tree arg_type,
1686 struct ipa_jump_func *jfunc)
1688 struct ipa_known_agg_contents_list *list = NULL;
1689 int item_count = 0, const_count = 0;
1690 HOST_WIDE_INT arg_offset, arg_size;
1691 gimple_stmt_iterator gsi;
1692 tree arg_base;
1693 bool check_ref, by_ref;
1694 ao_ref r;
1696 /* The function operates in three stages. First, we prepare check_ref, r,
1697 arg_base and arg_offset based on what is actually passed as an actual
1698 argument. */
1700 if (POINTER_TYPE_P (arg_type))
1702 by_ref = true;
1703 if (TREE_CODE (arg) == SSA_NAME)
1705 tree type_size;
1706 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1707 return;
1708 check_ref = true;
1709 arg_base = arg;
1710 arg_offset = 0;
1711 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1712 arg_size = tree_to_uhwi (type_size);
1713 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1715 else if (TREE_CODE (arg) == ADDR_EXPR)
1717 HOST_WIDE_INT arg_max_size;
1719 arg = TREE_OPERAND (arg, 0);
1720 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1721 &arg_max_size);
1722 if (arg_max_size == -1
1723 || arg_max_size != arg_size
1724 || arg_offset < 0)
1725 return;
1726 if (DECL_P (arg_base))
1728 check_ref = false;
1729 ao_ref_init (&r, arg_base);
1731 else
1732 return;
1734 else
1735 return;
1737 else
1739 HOST_WIDE_INT arg_max_size;
1741 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1743 by_ref = false;
1744 check_ref = false;
1745 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1746 &arg_max_size);
1747 if (arg_max_size == -1
1748 || arg_max_size != arg_size
1749 || arg_offset < 0)
1750 return;
1752 ao_ref_init (&r, arg);
1755 /* Second stage walks back the BB, looks at individual statements and as long
1756 as it is confident of how the statements affect contents of the
1757 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1758 describing it. */
1759 gsi = gsi_for_stmt (call);
1760 gsi_prev (&gsi);
1761 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1763 struct ipa_known_agg_contents_list *n, **p;
1764 gimple stmt = gsi_stmt (gsi);
1765 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1766 tree lhs, rhs, lhs_base;
1768 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1769 continue;
1770 if (!gimple_assign_single_p (stmt))
1771 break;
1773 lhs = gimple_assign_lhs (stmt);
1774 rhs = gimple_assign_rhs1 (stmt);
1775 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1776 || TREE_CODE (lhs) == BIT_FIELD_REF
1777 || contains_bitfld_component_ref_p (lhs))
1778 break;
1780 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1781 &lhs_max_size);
1782 if (lhs_max_size == -1
1783 || lhs_max_size != lhs_size)
1784 break;
1786 if (check_ref)
1788 if (TREE_CODE (lhs_base) != MEM_REF
1789 || TREE_OPERAND (lhs_base, 0) != arg_base
1790 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1791 break;
1793 else if (lhs_base != arg_base)
1795 if (DECL_P (lhs_base))
1796 continue;
1797 else
1798 break;
1801 bool already_there = false;
1802 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1803 &already_there);
1804 if (!p)
1805 break;
1806 if (already_there)
1807 continue;
1809 rhs = get_ssa_def_if_simple_copy (rhs);
1810 n = XALLOCA (struct ipa_known_agg_contents_list);
1811 n->size = lhs_size;
1812 n->offset = lhs_offset;
1813 if (is_gimple_ip_invariant (rhs))
1815 n->constant = rhs;
1816 const_count++;
1818 else
1819 n->constant = NULL_TREE;
1820 n->next = *p;
1821 *p = n;
1823 item_count++;
1824 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1825 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1826 break;
1829 /* Third stage just goes over the list and creates an appropriate vector of
1830 ipa_agg_jf_item structures out of it, of course only if there are
1831 any known constants to begin with. */
1833 if (const_count)
1835 jfunc->agg.by_ref = by_ref;
1836 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
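/* Return the type of the Ith formal parameter of the function called through
edge E, looking first at the declared argument types and falling back to the
callee's DECL_ARGUMENTS; return NULL if the type cannot be determined. */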
1840 static tree
1841 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1843 int n;
1844 tree type = (e->callee
1845 ? TREE_TYPE (e->callee->decl)
1846 : gimple_call_fntype (e->call_stmt));
1847 tree t = TYPE_ARG_TYPES (type);
1849 for (n = 0; n < i; n++)
1851 if (!t)
1852 break;
1853 t = TREE_CHAIN (t);
1855 if (t)
1856 return TREE_VALUE (t);
1857 if (!e->callee)
1858 return NULL;
1859 t = DECL_ARGUMENTS (e->callee->decl);
1860 for (n = 0; n < i; n++)
1862 if (!t)
1863 return NULL;
1864 t = TREE_CHAIN (t);
1866 if (t)
1867 return TREE_TYPE (t);
1868 return NULL;
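/* For example (a sketch, not from the original sources), given a callee with
an unprototyped K&R-style declaration such as int f (); but defined as
int f (a) int a; { ... }, TYPE_ARG_TYPES provides no parameter types, so,
provided the edge has a known callee, the DECL_ARGUMENTS fallback above
yields int for the first parameter. */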
1871 /* Compute jump function for all arguments of callsite CS and insert the
1872 information in the jump_functions array in the ipa_edge_args corresponding
1873 to this callsite. */
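/* As an illustrative sketch (caller and callee here are hypothetical), in
void caller (int x, struct S *s)
{
callee (7, x + 1, s);
}
the first argument would typically yield an IPA_JF_CONST jump function, the
second an arithmetic IPA_JF_PASS_THROUGH with operation PLUS_EXPR and
operand 1, and the third a simple pass-through, possibly accompanied by
aggregate contents and a polymorphic call context. */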
1875 static void
1876 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1877 struct cgraph_edge *cs)
1879 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1880 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1881 gimple_call call = cs->call_stmt;
1882 int n, arg_num = gimple_call_num_args (call);
1883 bool useful_context = false;
1885 if (arg_num == 0 || args->jump_functions)
1886 return;
1887 vec_safe_grow_cleared (args->jump_functions, arg_num);
1888 if (flag_devirtualize)
1889 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1891 if (gimple_call_internal_p (call))
1892 return;
1893 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1894 return;
1896 for (n = 0; n < arg_num; n++)
1898 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1899 tree arg = gimple_call_arg (call, n);
1900 tree param_type = ipa_get_callee_param_type (cs, n);
1901 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1903 tree instance;
1904 struct ipa_polymorphic_call_context context (cs->caller->decl,
1905 arg, cs->call_stmt,
1906 &instance);
1907 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1908 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1909 if (!context.useless_p ())
1910 useful_context = true;
1913 if (is_gimple_ip_invariant (arg))
1914 ipa_set_jf_constant (jfunc, arg, cs);
1915 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1916 && TREE_CODE (arg) == PARM_DECL)
1918 int index = ipa_get_param_decl_index (info, arg);
1920 gcc_assert (index >= 0);
1921 /* Aggregate passed by value, check for pass-through, otherwise we
1922 will attempt to fill in aggregate contents later in this
1923 for loop. */
1924 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1926 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1927 continue;
1930 else if (TREE_CODE (arg) == SSA_NAME)
1932 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1934 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1935 if (index >= 0)
1937 bool agg_p, type_p;
1938 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1939 if (param_type && POINTER_TYPE_P (param_type))
1940 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1941 call, jfunc);
1942 else
1943 type_p = false;
1944 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1945 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1946 type_p);
1949 else
1951 gimple stmt = SSA_NAME_DEF_STMT (arg);
1952 if (is_gimple_assign (stmt))
1953 compute_complex_assign_jump_func (fbi, info, jfunc,
1954 call, stmt, arg, param_type);
1955 else if (gimple_code (stmt) == GIMPLE_PHI)
1956 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1957 call,
1958 as_a <gimple_phi> (stmt),
1959 param_type);
1962 else
1963 compute_known_type_jump_func (arg, jfunc, call,
1964 param_type
1965 && POINTER_TYPE_P (param_type)
1966 ? TREE_TYPE (param_type)
1967 : NULL);
1969 /* If ARG is a pointer, we cannot use its type to determine the type of the
1970 aggregate passed (because type conversions are ignored in gimple). Usually
1971 we can safely get the type from the function declaration, but in case of
1972 K&R prototypes or variadic functions we can try our luck with the type of
1973 the pointer passed. TODO: Since we look for actual initialization of the
1974 memory object, we might do better to work out the type based on the memory stores we find. */
1975 if (!param_type)
1976 param_type = TREE_TYPE (arg);
1978 if ((jfunc->type != IPA_JF_PASS_THROUGH
1979 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1980 && (jfunc->type != IPA_JF_ANCESTOR
1981 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1982 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1983 || POINTER_TYPE_P (param_type)))
1984 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1986 if (!useful_context)
1987 vec_free (args->polymorphic_call_contexts);
1990 /* Compute jump functions for all edges - both direct and indirect - outgoing
1991 from BB. */
1993 static void
1994 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1996 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1997 int i;
1998 struct cgraph_edge *cs;
2000 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2002 struct cgraph_node *callee = cs->callee;
2004 if (callee)
2006 callee = callee->ultimate_alias_target ();
2007 /* We do not need to bother analyzing calls to unknown functions
2008 unless they may become known during lto/whopr. */
2009 if (!callee->definition && !flag_lto)
2010 continue;
2012 ipa_compute_jump_functions_for_edge (fbi, cs);
2016 /* If STMT looks like a statement loading a value from a member pointer formal
2017 parameter, return that parameter and store the offset of the field to
2018 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2019 might be clobbered). If USE_DELTA, then we look for a use of the delta
2020 field rather than the pfn. */
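/* For instance, with USE_DELTA false this would match a load such as
f$__pfn_24 = MEM[(struct  *)&f + 4B];
(taken from the example in the comment of ipa_analyze_indirect_call_uses
below), returning the PARM_DECL f and storing the bit position of the pfn
field in *OFFSET_P. */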
2022 static tree
2023 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2024 HOST_WIDE_INT *offset_p)
2026 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2028 if (!gimple_assign_single_p (stmt))
2029 return NULL_TREE;
2031 rhs = gimple_assign_rhs1 (stmt);
2032 if (TREE_CODE (rhs) == COMPONENT_REF)
2034 ref_field = TREE_OPERAND (rhs, 1);
2035 rhs = TREE_OPERAND (rhs, 0);
2037 else
2038 ref_field = NULL_TREE;
2039 if (TREE_CODE (rhs) != MEM_REF)
2040 return NULL_TREE;
2041 rec = TREE_OPERAND (rhs, 0);
2042 if (TREE_CODE (rec) != ADDR_EXPR)
2043 return NULL_TREE;
2044 rec = TREE_OPERAND (rec, 0);
2045 if (TREE_CODE (rec) != PARM_DECL
2046 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2047 return NULL_TREE;
2048 ref_offset = TREE_OPERAND (rhs, 1);
2050 if (use_delta)
2051 fld = delta_field;
2052 else
2053 fld = ptr_field;
2054 if (offset_p)
2055 *offset_p = int_bit_position (fld);
2057 if (ref_field)
2059 if (integer_nonzerop (ref_offset))
2060 return NULL_TREE;
2061 return ref_field == fld ? rec : NULL_TREE;
2063 else
2064 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2065 : NULL_TREE;
2068 /* Returns true iff T is an SSA_NAME defined by a statement. */
2070 static bool
2071 ipa_is_ssa_with_stmt_def (tree t)
2073 if (TREE_CODE (t) == SSA_NAME
2074 && !SSA_NAME_IS_DEFAULT_DEF (t))
2075 return true;
2076 else
2077 return false;
2080 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2081 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2082 indirect call graph edge. */
2084 static struct cgraph_edge *
2085 ipa_note_param_call (struct cgraph_node *node, int param_index,
2086 gimple_call stmt)
2088 struct cgraph_edge *cs;
2090 cs = node->get_edge (stmt);
2091 cs->indirect_info->param_index = param_index;
2092 cs->indirect_info->agg_contents = 0;
2093 cs->indirect_info->member_ptr = 0;
2094 return cs;
2097 /* Analyze the CALL and examine uses of formal parameters of the caller
2098 FBI->node (described by FBI->info, with intermediate per-parameter
2099 information kept in FBI itself). Currently it checks
2100 whether the call calls a pointer that is a formal parameter and if so, the
2101 parameter is marked with the called flag and an indirect call graph edge
2102 describing the call is created. This is very simple for ordinary pointers
2103 represented in SSA but not-so-nice when it comes to member pointers. The
2104 ugly part of this function does nothing more than try to match the
2105 pattern of such a call. An example of such a pattern is the gimple dump
2106 below, the call is on the last line:
2108 <bb 2>:
2109 f$__delta_5 = f.__delta;
2110 f$__pfn_24 = f.__pfn;
2112 or
2113 <bb 2>:
2114 f$__delta_5 = MEM[(struct *)&f];
2115 f$__pfn_24 = MEM[(struct *)&f + 4B];
2117 and a few lines below:
2119 <bb 5>:
2120 D.2496_3 = (int) f$__pfn_24;
2121 D.2497_4 = D.2496_3 & 1;
2122 if (D.2497_4 != 0)
2123 goto <bb 3>;
2124 else
2125 goto <bb 4>;
2127 <bb 6>:
2128 D.2500_7 = (unsigned int) f$__delta_5;
2129 D.2501_8 = &S + D.2500_7;
2130 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2131 D.2503_10 = *D.2502_9;
2132 D.2504_12 = f$__pfn_24 + -1;
2133 D.2505_13 = (unsigned int) D.2504_12;
2134 D.2506_14 = D.2503_10 + D.2505_13;
2135 D.2507_15 = *D.2506_14;
2136 iftmp.11_16 = (String:: *) D.2507_15;
2138 <bb 7>:
2139 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2140 D.2500_19 = (unsigned int) f$__delta_5;
2141 D.2508_20 = &S + D.2500_19;
2142 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2144 Such patterns are results of simple calls to a member pointer:
2146 int doprinting (int (MyString::* f)(int) const)
2148 MyString S ("somestring");
2150 return (S.*f)(4);
2153 Moreover, the function also looks for called pointers loaded from aggregates
2154 passed by value or reference. */
2156 static void
2157 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple_call call,
2158 tree target)
2160 struct ipa_node_params *info = fbi->info;
2161 HOST_WIDE_INT offset;
2162 bool by_ref;
2164 if (SSA_NAME_IS_DEFAULT_DEF (target))
2166 tree var = SSA_NAME_VAR (target);
2167 int index = ipa_get_param_decl_index (info, var);
2168 if (index >= 0)
2169 ipa_note_param_call (fbi->node, index, call);
2170 return;
2173 int index;
2174 gimple def = SSA_NAME_DEF_STMT (target);
2175 if (gimple_assign_single_p (def)
2176 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2177 gimple_assign_rhs1 (def), &index, &offset,
2178 NULL, &by_ref))
2180 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2181 cs->indirect_info->offset = offset;
2182 cs->indirect_info->agg_contents = 1;
2183 cs->indirect_info->by_ref = by_ref;
2184 return;
2187 /* Now we need to try to match the complex pattern of calling a member
2188 pointer. */
2189 if (gimple_code (def) != GIMPLE_PHI
2190 || gimple_phi_num_args (def) != 2
2191 || !POINTER_TYPE_P (TREE_TYPE (target))
2192 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2193 return;
2195 /* First, we need to check whether one of these is a load from a member
2196 pointer that is a parameter to this function. */
2197 tree n1 = PHI_ARG_DEF (def, 0);
2198 tree n2 = PHI_ARG_DEF (def, 1);
2199 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2200 return;
2201 gimple d1 = SSA_NAME_DEF_STMT (n1);
2202 gimple d2 = SSA_NAME_DEF_STMT (n2);
2204 tree rec;
2205 basic_block bb, virt_bb;
2206 basic_block join = gimple_bb (def);
2207 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2209 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2210 return;
2212 bb = EDGE_PRED (join, 0)->src;
2213 virt_bb = gimple_bb (d2);
2215 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2217 bb = EDGE_PRED (join, 1)->src;
2218 virt_bb = gimple_bb (d1);
2220 else
2221 return;
2223 /* Second, we need to check that the basic blocks are laid out in the way
2224 corresponding to the pattern. */
2226 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2227 || single_pred (virt_bb) != bb
2228 || single_succ (virt_bb) != join)
2229 return;
2231 /* Third, let's see that the branching is done depending on the least
2232 significant bit of the pfn. */
2234 gimple branch = last_stmt (bb);
2235 if (!branch || gimple_code (branch) != GIMPLE_COND)
2236 return;
2238 if ((gimple_cond_code (branch) != NE_EXPR
2239 && gimple_cond_code (branch) != EQ_EXPR)
2240 || !integer_zerop (gimple_cond_rhs (branch)))
2241 return;
2243 tree cond = gimple_cond_lhs (branch);
2244 if (!ipa_is_ssa_with_stmt_def (cond))
2245 return;
2247 def = SSA_NAME_DEF_STMT (cond);
2248 if (!is_gimple_assign (def)
2249 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2250 || !integer_onep (gimple_assign_rhs2 (def)))
2251 return;
2253 cond = gimple_assign_rhs1 (def);
2254 if (!ipa_is_ssa_with_stmt_def (cond))
2255 return;
2257 def = SSA_NAME_DEF_STMT (cond);
2259 if (is_gimple_assign (def)
2260 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2262 cond = gimple_assign_rhs1 (def);
2263 if (!ipa_is_ssa_with_stmt_def (cond))
2264 return;
2265 def = SSA_NAME_DEF_STMT (cond);
2268 tree rec2;
2269 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2270 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2271 == ptrmemfunc_vbit_in_delta),
2272 NULL);
2273 if (rec != rec2)
2274 return;
2276 index = ipa_get_param_decl_index (info, rec);
2277 if (index >= 0
2278 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2280 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2281 cs->indirect_info->offset = offset;
2282 cs->indirect_info->agg_contents = 1;
2283 cs->indirect_info->member_ptr = 1;
2286 return;
2289 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2290 object referenced in the expression is a formal parameter of the caller
2291 FBI->node (described by FBI->info), create a call note for the
2292 statement. */
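/* As a hypothetical example, for
struct A { virtual void foo (); };
void bar (A *a) { a->foo (); }
the call appears in gimple as a call to an OBJ_TYPE_REF with token 0 whose
object is the default definition of parameter a, so an indirect call note
with the index of a is created and marked polymorphic. */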
2294 static void
2295 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2296 gimple_call call, tree target)
2298 tree obj = OBJ_TYPE_REF_OBJECT (target);
2299 int index;
2300 HOST_WIDE_INT anc_offset;
2302 if (!flag_devirtualize)
2303 return;
2305 if (TREE_CODE (obj) != SSA_NAME)
2306 return;
2308 struct ipa_node_params *info = fbi->info;
2309 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2311 struct ipa_jump_func jfunc;
2312 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2313 return;
2315 anc_offset = 0;
2316 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2317 gcc_assert (index >= 0);
2318 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2319 call, &jfunc))
2320 return;
2322 else
2324 struct ipa_jump_func jfunc;
2325 gimple stmt = SSA_NAME_DEF_STMT (obj);
2326 tree expr;
2328 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2329 if (!expr)
2330 return;
2331 index = ipa_get_param_decl_index (info,
2332 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2333 gcc_assert (index >= 0);
2334 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2335 call, &jfunc, anc_offset))
2336 return;
2339 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2340 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2341 ii->offset = anc_offset;
2342 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2343 ii->otr_type = obj_type_ref_class (target);
2344 ii->polymorphic = 1;
2347 /* Analyze call statement CALL to determine whether and how it utilizes formal
2348 parameters of the caller (described by FBI->info), which also keeps
2349 intermediate information about each formal parameter. */
2351 static void
2352 ipa_analyze_call_uses (struct func_body_info *fbi, gimple_call call)
2354 tree target = gimple_call_fn (call);
2356 if (!target
2357 || (TREE_CODE (target) != SSA_NAME
2358 && !virtual_method_call_p (target)))
2359 return;
2361 struct cgraph_edge *cs = fbi->node->get_edge (call);
2362 /* If we previously turned the call into a direct call, there is
2363 no need to analyze. */
2364 if (cs && !cs->indirect_unknown_callee)
2365 return;
2367 if (cs->indirect_info->polymorphic)
2369 tree instance;
2370 tree target = gimple_call_fn (call);
2371 ipa_polymorphic_call_context context (current_function_decl,
2372 target, call, &instance);
2374 gcc_checking_assert (cs->indirect_info->otr_type
2375 == obj_type_ref_class (target));
2376 gcc_checking_assert (cs->indirect_info->otr_token
2377 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2379 cs->indirect_info->vptr_changed
2380 = !context.get_dynamic_type (instance,
2381 OBJ_TYPE_REF_OBJECT (target),
2382 obj_type_ref_class (target), call);
2383 cs->indirect_info->context = context;
2386 if (TREE_CODE (target) == SSA_NAME)
2387 ipa_analyze_indirect_call_uses (fbi, call, target);
2388 else if (virtual_method_call_p (target))
2389 ipa_analyze_virtual_call_uses (fbi, call, target);
2393 /* Analyze the call statement STMT with respect to formal parameters (described
2394 in FBI->info) of the caller given by FBI->node. Currently it only checks whether
2395 formal parameters are called. */
2397 static void
2398 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2400 if (is_gimple_call (stmt))
2401 ipa_analyze_call_uses (fbi, as_a <gimple_call> (stmt));
2404 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2405 If OP is a parameter declaration, mark it as used in the info structure
2406 passed in DATA. */
2408 static bool
2409 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2411 struct ipa_node_params *info = (struct ipa_node_params *) data;
2413 op = get_base_address (op);
2414 if (op
2415 && TREE_CODE (op) == PARM_DECL)
2417 int index = ipa_get_param_decl_index (info, op);
2418 gcc_assert (index >= 0);
2419 ipa_set_param_used (info, index, true);
2422 return false;
2425 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2426 the findings in various structures of the associated ipa_node_params
2427 structure, such as parameter flags, notes etc. FBI holds various data about
2428 the function being analyzed. */
2430 static void
2431 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2433 gimple_stmt_iterator gsi;
2434 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2436 gimple stmt = gsi_stmt (gsi);
2438 if (is_gimple_debug (stmt))
2439 continue;
2441 ipa_analyze_stmt_uses (fbi, stmt);
2442 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis);
2447 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2448 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2449 visit_ref_for_mod_analysis,
2450 visit_ref_for_mod_analysis,
2451 visit_ref_for_mod_analysis);
2454 /* Calculate controlled uses of parameters of NODE. */
2456 static void
2457 ipa_analyze_controlled_uses (struct cgraph_node *node)
2459 struct ipa_node_params *info = IPA_NODE_REF (node);
2461 for (int i = 0; i < ipa_get_param_count (info); i++)
2463 tree parm = ipa_get_param (info, i);
2464 int controlled_uses = 0;
2466 /* For SSA regs see if parameter is used. For non-SSA we compute
2467 the flag during modification analysis. */
2468 if (is_gimple_reg (parm))
2470 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2471 parm);
2472 if (ddef && !has_zero_uses (ddef))
2474 imm_use_iterator imm_iter;
2475 use_operand_p use_p;
2477 ipa_set_param_used (info, i, true);
2478 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2479 if (!is_gimple_call (USE_STMT (use_p)))
2481 if (!is_gimple_debug (USE_STMT (use_p)))
2483 controlled_uses = IPA_UNDESCRIBED_USE;
2484 break;
2487 else
2488 controlled_uses++;
2490 else
2491 controlled_uses = 0;
2493 else
2494 controlled_uses = IPA_UNDESCRIBED_USE;
2495 ipa_set_controlled_uses (info, i, controlled_uses);
2499 /* Free stuff in BI. */
2501 static void
2502 free_ipa_bb_info (struct ipa_bb_info *bi)
2504 bi->cg_edges.release ();
2505 bi->param_aa_statuses.release ();
2508 /* Dominator walker driving the analysis. */
2510 class analysis_dom_walker : public dom_walker
2512 public:
2513 analysis_dom_walker (struct func_body_info *fbi)
2514 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2516 virtual void before_dom_children (basic_block);
2518 private:
2519 struct func_body_info *m_fbi;
2522 void
2523 analysis_dom_walker::before_dom_children (basic_block bb)
2525 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2526 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2529 /* Initialize the array describing properties of formal parameters
2530 of NODE, analyze their uses and compute jump functions associated
2531 with actual arguments of calls from within NODE. */
2533 void
2534 ipa_analyze_node (struct cgraph_node *node)
2536 struct func_body_info fbi;
2537 struct ipa_node_params *info;
2539 ipa_check_create_node_params ();
2540 ipa_check_create_edge_args ();
2541 info = IPA_NODE_REF (node);
2543 if (info->analysis_done)
2544 return;
2545 info->analysis_done = 1;
2547 if (ipa_func_spec_opts_forbid_analysis_p (node))
2549 for (int i = 0; i < ipa_get_param_count (info); i++)
2551 ipa_set_param_used (info, i, true);
2552 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2554 return;
2557 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2558 push_cfun (func);
2559 calculate_dominance_info (CDI_DOMINATORS);
2560 ipa_initialize_node_params (node);
2561 ipa_analyze_controlled_uses (node);
2563 fbi.node = node;
2564 fbi.info = IPA_NODE_REF (node);
2565 fbi.bb_infos = vNULL;
2566 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2567 fbi.param_count = ipa_get_param_count (info);
2568 fbi.aa_walked = 0;
2570 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2572 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2573 bi->cg_edges.safe_push (cs);
2576 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2578 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2579 bi->cg_edges.safe_push (cs);
2582 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2584 int i;
2585 struct ipa_bb_info *bi;
2586 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2587 free_ipa_bb_info (bi);
2588 fbi.bb_infos.release ();
2589 free_dominance_info (CDI_DOMINATORS);
2590 pop_cfun ();
2593 /* Update the jump function DST when the call graph edge corresponding to SRC
2594 is being inlined, knowing that DST is of type ancestor and SRC of known
2595 type. */
2597 static void
2598 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2599 struct ipa_jump_func *dst)
2601 HOST_WIDE_INT combined_offset;
2602 tree combined_type;
2604 if (!ipa_get_jf_ancestor_type_preserved (dst))
2606 dst->type = IPA_JF_UNKNOWN;
2607 return;
2610 combined_offset = ipa_get_jf_known_type_offset (src)
2611 + ipa_get_jf_ancestor_offset (dst);
2612 combined_type = ipa_get_jf_ancestor_type (dst);
2614 ipa_set_jf_known_type (dst, combined_offset,
2615 ipa_get_jf_known_type_base_type (src),
2616 combined_type);
2619 /* Update the jump functions associated with call graph edge E when the call
2620 graph edge CS is being inlined, assuming that E->caller is already (possibly
2621 indirectly) inlined into CS->callee and that E has not been inlined. */
2623 static void
2624 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2625 struct cgraph_edge *e)
2627 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2628 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2629 int count = ipa_get_cs_argument_count (args);
2630 int i;
2632 for (i = 0; i < count; i++)
2634 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2635 struct ipa_polymorphic_call_context *dst_ctx
2636 = ipa_get_ith_polymorhic_call_context (args, i);
2638 if (dst->type == IPA_JF_ANCESTOR)
2640 struct ipa_jump_func *src;
2641 int dst_fid = dst->value.ancestor.formal_id;
2642 struct ipa_polymorphic_call_context *src_ctx
2643 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2645 /* Variable number of arguments can cause havoc if we try to access
2646 one that does not exist in the inlined edge. So make sure we
2647 don't. */
2648 if (dst_fid >= ipa_get_cs_argument_count (top))
2650 dst->type = IPA_JF_UNKNOWN;
2651 continue;
2654 src = ipa_get_ith_jump_func (top, dst_fid);
2656 if (src_ctx && !src_ctx->useless_p ())
2658 struct ipa_polymorphic_call_context ctx = *src_ctx;
2660 /* TODO: Make type preserved safe WRT contexts. */
2661 if (!dst->value.ancestor.agg_preserved)
2662 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2663 ctx.offset_by (dst->value.ancestor.offset);
2664 if (!ctx.useless_p ())
2666 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2667 count);
2668 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2670 dst_ctx->combine_with (ctx);
2672 if (src->agg.items
2673 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2675 struct ipa_agg_jf_item *item;
2676 int j;
2678 /* Currently we do not produce clobber aggregate jump functions,
2679 replace with merging when we do. */
2680 gcc_assert (!dst->agg.items);
2682 dst->agg.items = vec_safe_copy (src->agg.items);
2683 dst->agg.by_ref = src->agg.by_ref;
2684 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2685 item->offset -= dst->value.ancestor.offset;
2688 if (src->type == IPA_JF_KNOWN_TYPE)
2689 combine_known_type_and_ancestor_jfs (src, dst);
2690 else if (src->type == IPA_JF_PASS_THROUGH
2691 && src->value.pass_through.operation == NOP_EXPR)
2693 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2694 dst->value.ancestor.agg_preserved &=
2695 src->value.pass_through.agg_preserved;
2696 dst->value.ancestor.type_preserved &=
2697 src->value.pass_through.type_preserved;
2699 else if (src->type == IPA_JF_ANCESTOR)
2701 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2702 dst->value.ancestor.offset += src->value.ancestor.offset;
2703 dst->value.ancestor.agg_preserved &=
2704 src->value.ancestor.agg_preserved;
2705 dst->value.ancestor.type_preserved &=
2706 src->value.ancestor.type_preserved;
2708 else
2709 dst->type = IPA_JF_UNKNOWN;
2711 else if (dst->type == IPA_JF_PASS_THROUGH)
2713 struct ipa_jump_func *src;
2714 /* We must check range due to calls with variable number of arguments
2715 and we cannot combine jump functions with operations. */
2716 if (dst->value.pass_through.operation == NOP_EXPR
2717 && (dst->value.pass_through.formal_id
2718 < ipa_get_cs_argument_count (top)))
2720 int dst_fid = dst->value.pass_through.formal_id;
2721 src = ipa_get_ith_jump_func (top, dst_fid);
2722 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2723 struct ipa_polymorphic_call_context *src_ctx
2724 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2726 if (src_ctx && !src_ctx->useless_p ())
2728 struct ipa_polymorphic_call_context ctx = *src_ctx;
2730 /* TODO: Make type preserved safe WRT contexts. */
2731 if (!dst->value.ancestor.agg_preserved)
2732 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2733 if (!ctx.useless_p ())
2735 if (!dst_ctx)
2737 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2738 count);
2739 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2741 dst_ctx->combine_with (ctx);
2744 switch (src->type)
2746 case IPA_JF_UNKNOWN:
2747 dst->type = IPA_JF_UNKNOWN;
2748 break;
2749 case IPA_JF_KNOWN_TYPE:
2750 if (ipa_get_jf_pass_through_type_preserved (dst))
2751 ipa_set_jf_known_type (dst,
2752 ipa_get_jf_known_type_offset (src),
2753 ipa_get_jf_known_type_base_type (src),
2754 ipa_get_jf_known_type_component_type (src));
2755 else
2756 dst->type = IPA_JF_UNKNOWN;
2757 break;
2758 case IPA_JF_CONST:
2759 ipa_set_jf_cst_copy (dst, src);
2760 break;
2762 case IPA_JF_PASS_THROUGH:
2764 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2765 enum tree_code operation;
2766 operation = ipa_get_jf_pass_through_operation (src);
2768 if (operation == NOP_EXPR)
2770 bool agg_p, type_p;
2771 agg_p = dst_agg_p
2772 && ipa_get_jf_pass_through_agg_preserved (src);
2773 type_p = ipa_get_jf_pass_through_type_preserved (src)
2774 && ipa_get_jf_pass_through_type_preserved (dst);
2775 ipa_set_jf_simple_pass_through (dst, formal_id,
2776 agg_p, type_p);
2778 else
2780 tree operand = ipa_get_jf_pass_through_operand (src);
2781 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2782 operation);
2784 break;
2786 case IPA_JF_ANCESTOR:
2788 bool agg_p, type_p;
2789 agg_p = dst_agg_p
2790 && ipa_get_jf_ancestor_agg_preserved (src);
2791 type_p = ipa_get_jf_ancestor_type_preserved (src)
2792 && ipa_get_jf_pass_through_type_preserved (dst);
2793 ipa_set_ancestor_jf (dst,
2794 ipa_get_jf_ancestor_offset (src),
2795 ipa_get_jf_ancestor_type (src),
2796 ipa_get_jf_ancestor_formal_id (src),
2797 agg_p, type_p);
2798 break;
2800 default:
2801 gcc_unreachable ();
2804 if (src->agg.items
2805 && (dst_agg_p || !src->agg.by_ref))
2807 /* Currently we do not produce clobber aggregate jump
2808 functions, replace with merging when we do. */
2809 gcc_assert (!dst->agg.items);
2811 dst->agg.by_ref = src->agg.by_ref;
2812 dst->agg.items = vec_safe_copy (src->agg.items);
2815 else
2816 dst->type = IPA_JF_UNKNOWN;
2821 /* If TARGET is an addr_expr of a function declaration, make it the
2822 (speculative) destination of an indirect edge IE and return the edge.
2823 Otherwise, return NULL. */
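/* For instance, should earlier propagation have determined that the called
pointer is always &foo, passing &foo here as TARGET turns IE into a direct
call to foo, whereas a TARGET that provably is not a function becomes a call
to __builtin_unreachable in the code below (a sketch of the two main
outcomes; foo is a hypothetical name). */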
2825 struct cgraph_edge *
2826 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2827 bool speculative)
2829 struct cgraph_node *callee;
2830 struct inline_edge_summary *es = inline_edge_summary (ie);
2831 bool unreachable = false;
2833 if (TREE_CODE (target) == ADDR_EXPR)
2834 target = TREE_OPERAND (target, 0);
2835 if (TREE_CODE (target) != FUNCTION_DECL)
2837 target = canonicalize_constructor_val (target, NULL);
2838 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2840 if (ie->indirect_info->member_ptr)
2841 /* Member pointer call that goes through a VMT lookup. */
2842 return NULL;
2844 if (dump_enabled_p ())
2846 location_t loc = gimple_location_safe (ie->call_stmt);
2847 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2848 "discovered direct call to non-function in %s/%i, "
2849 "making it __builtin_unreachable\n",
2850 ie->caller->name (), ie->caller->order);
2853 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2854 callee = cgraph_node::get_create (target);
2855 unreachable = true;
2857 else
2858 callee = cgraph_node::get (target);
2860 else
2861 callee = cgraph_node::get (target);
2863 /* Because may-edges are not explicitly represented and the vtable may be external,
2864 we may create the first reference to the object in the unit. */
2865 if (!callee || callee->global.inlined_to)
2868 /* We had better ensure we can refer to it.
2869 In the case of static functions we are out of luck, since we have already
2870 removed their bodies. In the case of public functions we may or may
2871 not introduce the reference. */
2872 if (!canonicalize_constructor_val (target, NULL)
2873 || !TREE_PUBLIC (target))
2875 if (dump_file)
2876 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2877 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2878 xstrdup (ie->caller->name ()),
2879 ie->caller->order,
2880 xstrdup (ie->callee->name ()),
2881 ie->callee->order);
2882 return NULL;
2884 callee = cgraph_node::get_create (target);
2888 /* If the edge is already speculated, check whether the new target agrees with the previous speculation; in either case there is nothing more to do here. */
2888 if (speculative && ie->speculative)
2890 struct cgraph_edge *e2;
2891 struct ipa_ref *ref;
2892 ie->speculative_call_info (e2, ie, ref);
2893 if (e2->callee->ultimate_alias_target ()
2894 != callee->ultimate_alias_target ())
2896 if (dump_file)
2897 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2898 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2899 xstrdup (ie->caller->name ()),
2900 ie->caller->order,
2901 xstrdup (callee->name ()),
2902 callee->order,
2903 xstrdup (e2->callee->name ()),
2904 e2->callee->order);
2906 else
2908 if (dump_file)
2909 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2910 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2911 xstrdup (ie->caller->name ()),
2912 ie->caller->order,
2913 xstrdup (callee->name ()),
2914 callee->order);
2916 return NULL;
2919 if (!dbg_cnt (devirt))
2920 return NULL;
2922 ipa_check_create_node_params ();
2924 /* We cannot make edges to inline clones. It is a bug that someone removed
2925 the cgraph node too early. */
2926 gcc_assert (!callee->global.inlined_to);
2928 if (dump_file && !unreachable)
2930 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2931 "(%s/%i -> %s/%i), for stmt ",
2932 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2933 speculative ? "speculative" : "known",
2934 xstrdup (ie->caller->name ()),
2935 ie->caller->order,
2936 xstrdup (callee->name ()),
2937 callee->order);
2938 if (ie->call_stmt)
2939 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2940 else
2941 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2943 if (dump_enabled_p ())
2945 location_t loc = gimple_location_safe (ie->call_stmt);
2947 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2948 "converting indirect call in %s to direct call to %s\n",
2949 ie->caller->name (), callee->name ());
2951 if (!speculative)
2952 ie = ie->make_direct (callee);
2953 else
2955 if (!callee->can_be_discarded_p ())
2957 cgraph_node *alias;
2958 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2959 if (alias)
2960 callee = alias;
2962 ie = ie->make_speculative
2963 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2965 es = inline_edge_summary (ie);
2966 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2967 - eni_size_weights.call_cost);
2968 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2969 - eni_time_weights.call_cost);
2971 return ie;
2974 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2975 return NULL if there is none. BY_REF specifies whether the value has to
2976 be passed by reference or by value. */
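/* Continuing the hypothetical sketch from earlier, looking up offset 32 in
an aggregate jump function holding {0 -> 1, 32 -> 2}, with BY_REF matching
the recorded by_ref flag, would return the constant 2; a mismatched BY_REF
or an unknown offset yields NULL. */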
2978 tree
2979 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2980 HOST_WIDE_INT offset, bool by_ref)
2982 struct ipa_agg_jf_item *item;
2983 int i;
2985 if (by_ref != agg->by_ref)
2986 return NULL;
2988 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2989 if (item->offset == offset)
2991 /* Currently we do not have clobber values, return NULL for them once
2992 we do. */
2993 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2994 return item->value;
2996 return NULL;
2999 /* Remove a reference to SYMBOL from the list of references of a node given by
3000 reference description RDESC. Return true if the reference has been
3001 successfully found and removed. */
3003 static bool
3004 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3006 struct ipa_ref *to_del;
3007 struct cgraph_edge *origin;
3009 origin = rdesc->cs;
3010 if (!origin)
3011 return false;
3012 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3013 origin->lto_stmt_uid);
3014 if (!to_del)
3015 return false;
3017 to_del->remove_reference ();
3018 if (dump_file)
3019 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3020 xstrdup (origin->caller->name ()),
3021 origin->caller->order, xstrdup (symbol->name ()));
3022 return true;
3025 /* If JFUNC has a reference description with refcount different from
3026 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3027 NULL. JFUNC must be a constant jump function. */
3029 static struct ipa_cst_ref_desc *
3030 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3032 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3033 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3034 return rdesc;
3035 else
3036 return NULL;
3039 /* If the value of constant jump function JFUNC is an address of a function
3040 declaration, return the associated call graph node. Otherwise return
3041 NULL. */
3043 static cgraph_node *
3044 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3046 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3047 tree cst = ipa_get_jf_constant (jfunc);
3048 if (TREE_CODE (cst) != ADDR_EXPR
3049 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3050 return NULL;
3052 return cgraph_node::get (TREE_OPERAND (cst, 0));
3056 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3057 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3058 the edge specified in the rdesc. Return false if either the symbol or the
3059 reference could not be found, otherwise return true. */
3061 static bool
3062 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3064 struct ipa_cst_ref_desc *rdesc;
3065 if (jfunc->type == IPA_JF_CONST
3066 && (rdesc = jfunc_rdesc_usable (jfunc))
3067 && --rdesc->refcount == 0)
3069 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3070 if (!symbol)
3071 return false;
3073 return remove_described_reference (symbol, rdesc);
3075 return true;
3078 /* Try to find a destination for indirect edge IE that corresponds to a simple
3079 call or a call of a member function pointer and where the destination is a
3080 pointer formal parameter described by jump function JFUNC. If it can be
3081 determined, return the newly direct edge, otherwise return NULL.
3082 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3084 static struct cgraph_edge *
3085 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3086 struct ipa_jump_func *jfunc,
3087 struct ipa_node_params *new_root_info)
3089 struct cgraph_edge *cs;
3090 tree target;
3091 bool agg_contents = ie->indirect_info->agg_contents;
3093 if (ie->indirect_info->agg_contents)
3094 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3095 ie->indirect_info->offset,
3096 ie->indirect_info->by_ref);
3097 else
3098 target = ipa_value_from_jfunc (new_root_info, jfunc);
3099 if (!target)
3100 return NULL;
3101 cs = ipa_make_edge_direct_to_target (ie, target);
3103 if (cs && !agg_contents)
3105 bool ok;
3106 gcc_checking_assert (cs->callee
3107 && (cs != ie
3108 || jfunc->type != IPA_JF_CONST
3109 || !cgraph_node_for_jfunc (jfunc)
3110 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3111 ok = try_decrement_rdesc_refcount (jfunc);
3112 gcc_checking_assert (ok);
3115 return cs;
3118 /* Return the target to be used in cases of impossible devirtualization. IE
3119 and target (the latter can be NULL) are dumped when dumping is enabled. */
3121 tree
3122 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3124 if (dump_file)
3126 if (target)
3127 fprintf (dump_file,
3128 "Type inconsistent devirtualization: %s/%i->%s\n",
3129 ie->caller->name (), ie->caller->order,
3130 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3131 else
3132 fprintf (dump_file,
3133 "No devirtualization target in %s/%i\n",
3134 ie->caller->name (), ie->caller->order);
3136 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3137 cgraph_node::get_create (new_target);
3138 return new_target;
3141 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3142 call based on a formal parameter which is described by jump function JFUNC
3143 and if it can be determined, make it direct and return the direct edge.
3144 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3145 are relative to. */
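/* As an illustrative sketch, if the aggregate jump function records that the
memory the argument points to holds &_ZTV1A + 16 at the offset of the vptr,
vtable_pointer_value_to_vtable recovers A's vtable and
gimple_get_virt_method_for_vtable can then look the OTR token up in it,
yielding the devirtualization target (the names here are hypothetical). */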
3147 static struct cgraph_edge *
3148 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3149 struct ipa_jump_func *jfunc,
3150 struct ipa_node_params *new_root_info,
3151 struct ipa_polymorphic_call_context *ctx_ptr)
3153 tree binfo, target = NULL;
3154 bool speculative = false;
3155 bool updated = false;
3157 if (!flag_devirtualize)
3158 return NULL;
3160 /* If this is a call of a function parameter, restrict its type
3161 based on knowledge of the context. */
3162 if (ctx_ptr && !ie->indirect_info->by_ref)
3164 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3166 ctx.offset_by (ie->indirect_info->offset);
3168 if (ie->indirect_info->vptr_changed)
3169 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3170 ie->indirect_info->otr_type);
3172 updated = ie->indirect_info->context.combine_with
3173 (ctx, ie->indirect_info->otr_type);
3176 /* Try to do lookup via known virtual table pointer value. */
3177 if (!ie->indirect_info->by_ref
3178 && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
3180 tree vtable;
3181 unsigned HOST_WIDE_INT offset;
3182 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3183 ie->indirect_info->offset,
3184 true);
3185 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3187 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3188 vtable, offset);
3189 if (t)
3191 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3192 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3193 || !possible_polymorphic_call_target_p
3194 (ie, cgraph_node::get (t)))
3196 /* Do not speculate builtin_unreachable, it is stupid! */
3197 if (!ie->indirect_info->vptr_changed)
3198 target = ipa_impossible_devirt_target (ie, target);
3200 else
3202 target = t;
3203 speculative = ie->indirect_info->vptr_changed;
3209 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3211 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3213 struct ipa_polymorphic_call_context ctx (binfo,
3214 ie->indirect_info->otr_type,
3215 ie->indirect_info->offset);
3216 updated |= ie->indirect_info->context.combine_with
3217 (ctx, ie->indirect_info->otr_type);
3220 if (updated)
3222 ipa_polymorphic_call_context context (ie);
3223 vec <cgraph_node *> targets;
3224 bool final;
3226 targets = possible_polymorphic_call_targets
3227 (ie->indirect_info->otr_type,
3228 ie->indirect_info->otr_token,
3229 context, &final);
3230 if (final && targets.length () <= 1)
3232 if (targets.length () == 1)
3233 target = targets[0]->decl;
3234 else
3235 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3237 else if (!target && flag_devirtualize_speculatively
3238 && !ie->speculative && ie->maybe_hot_p ())
3240 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3241 ie->indirect_info->otr_token,
3242 ie->indirect_info->context);
3243 if (n)
3245 target = n->decl;
3246 speculative = true;
3251 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3253 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3254 ie->indirect_info->otr_type);
3255 if (binfo)
3257 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3258 binfo);
3259 if (t)
3261 target = t;
3262 speculative = false;
3267 if (target)
3269 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3271 if (speculative)
3272 return NULL;
3273 target = ipa_impossible_devirt_target (ie, target);
3275 return ipa_make_edge_direct_to_target (ie, target, speculative);
3277 else
3278 return NULL;
3281 /* Update the param called notes associated with NODE when CS is being inlined,
3282 assuming NODE is (potentially indirectly) inlined into CS->callee.
3283 Moreover, if the callee is discovered to be constant, create a new cgraph
3284 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3285 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
3287 static bool
3288 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3289 struct cgraph_node *node,
3290 vec<cgraph_edge *> *new_edges)
3292 struct ipa_edge_args *top;
3293 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3294 struct ipa_node_params *new_root_info;
3295 bool res = false;
3297 ipa_check_create_edge_args ();
3298 top = IPA_EDGE_REF (cs);
3299 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3300 ? cs->caller->global.inlined_to
3301 : cs->caller);
3303 for (ie = node->indirect_calls; ie; ie = next_ie)
3305 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3306 struct ipa_jump_func *jfunc;
3307 int param_index;
3309 next_ie = ie->next_callee;
3311 if (ici->param_index == -1)
3312 continue;
3314 /* We must check range due to calls with variable number of arguments: */
3315 if (ici->param_index >= ipa_get_cs_argument_count (top))
3317 ici->param_index = -1;
3318 continue;
3321 param_index = ici->param_index;
3322 jfunc = ipa_get_ith_jump_func (top, param_index);
3324 if (!flag_indirect_inlining)
3325 new_direct_edge = NULL;
3326 else if (ici->polymorphic)
3328 ipa_polymorphic_call_context *ctx;
3329 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3330 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3331 new_root_info,
3332 ctx);
3334 else
3335 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3336 new_root_info);
3337 /* If speculation was removed, then we need to do nothing. */
3338 if (new_direct_edge && new_direct_edge != ie)
3340 new_direct_edge->indirect_inlining_edge = 1;
3341 top = IPA_EDGE_REF (cs);
3342 res = true;
3344 else if (new_direct_edge)
3346 new_direct_edge->indirect_inlining_edge = 1;
3347 if (new_direct_edge->call_stmt)
3348 new_direct_edge->call_stmt_cannot_inline_p
3349 = !gimple_check_call_matching_types (
3350 new_direct_edge->call_stmt,
3351 new_direct_edge->callee->decl, false);
3352 if (new_edges)
3354 new_edges->safe_push (new_direct_edge);
3355 res = true;
3357 top = IPA_EDGE_REF (cs);
3359 else if (jfunc->type == IPA_JF_PASS_THROUGH
3360 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3362 if ((ici->agg_contents
3363 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3364 || (ici->polymorphic
3365 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3366 ici->param_index = -1;
3367 else
3368 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3370 else if (jfunc->type == IPA_JF_ANCESTOR)
3372 if ((ici->agg_contents
3373 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3374 || (ici->polymorphic
3375 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3376 ici->param_index = -1;
3377 else
3379 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3380 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3383 else
3384 /* Either we can find a destination for this edge now or never. */
3385 ici->param_index = -1;
3388 return res;
3391 /* Recursively traverse subtree of NODE (including node) made of inlined
3392 cgraph_edges when CS has been inlined and invoke
3393 update_indirect_edges_after_inlining on all nodes and
3394 update_jump_functions_after_inlining on all non-inlined edges that lead out
3395 of this subtree. Newly discovered indirect edges will be added to
3396 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3397 created. */
3399 static bool
3400 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3401 struct cgraph_node *node,
3402 vec<cgraph_edge *> *new_edges)
3404 struct cgraph_edge *e;
3405 bool res;
3407 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3409 for (e = node->callees; e; e = e->next_callee)
3410 if (!e->inline_failed)
3411 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3412 else
3413 update_jump_functions_after_inlining (cs, e);
3414 for (e = node->indirect_calls; e; e = e->next_callee)
3415 update_jump_functions_after_inlining (cs, e);
3417 return res;
3420 /* Combine two controlled uses counters as done during inlining: unknown combined with anything is unknown; otherwise the result is C + D - 1, because the call site being inlined away was itself one of the counted uses. */
3422 static int
3423 combine_controlled_uses_counters (int c, int d)
3425 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3426 return IPA_UNDESCRIBED_USE;
3427 else
3428 return c + d - 1;
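/* For example, combining a count of 2 in the new root with a count of 3 in
the inlined callee yields 2 + 3 - 1 = 4, since the inlined call site
accounted for one of the root's uses and no longer exists. */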
3431 /* Propagate number of controlled users from CS->callee to the new root of the
3432 tree of inlined nodes. */
3434 static void
3435 propagate_controlled_uses (struct cgraph_edge *cs)
3437 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3438 struct cgraph_node *new_root = cs->caller->global.inlined_to
3439 ? cs->caller->global.inlined_to : cs->caller;
3440 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3441 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3442 int count, i;
3444 count = MIN (ipa_get_cs_argument_count (args),
3445 ipa_get_param_count (old_root_info));
3446 for (i = 0; i < count; i++)
3448 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3449 struct ipa_cst_ref_desc *rdesc;
3451 if (jf->type == IPA_JF_PASS_THROUGH)
3453 int src_idx, c, d;
3454 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3455 c = ipa_get_controlled_uses (new_root_info, src_idx);
3456 d = ipa_get_controlled_uses (old_root_info, i);
3458 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3459 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3460 c = combine_controlled_uses_counters (c, d);
3461 ipa_set_controlled_uses (new_root_info, src_idx, c);
3462 if (c == 0 && new_root_info->ipcp_orig_node)
3464 struct cgraph_node *n;
3465 struct ipa_ref *ref;
3466 tree t = new_root_info->known_vals[src_idx];
3468 if (t && TREE_CODE (t) == ADDR_EXPR
3469 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3470 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3471 && (ref = new_root->find_reference (n, NULL, 0)))
3473 if (dump_file)
3474 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3475 "reference from %s/%i to %s/%i.\n",
3476 xstrdup (new_root->name ()),
3477 new_root->order,
3478 xstrdup (n->name ()), n->order);
3479 ref->remove_reference ();
3483 else if (jf->type == IPA_JF_CONST
3484 && (rdesc = jfunc_rdesc_usable (jf)))
3486 int d = ipa_get_controlled_uses (old_root_info, i);
3487 int c = rdesc->refcount;
3488 rdesc->refcount = combine_controlled_uses_counters (c, d);
3489 if (rdesc->refcount == 0)
3491 tree cst = ipa_get_jf_constant (jf);
3492 struct cgraph_node *n;
3493 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3494 && TREE_CODE (TREE_OPERAND (cst, 0))
3495 == FUNCTION_DECL);
3496 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3497 if (n)
3499 struct cgraph_node *clone;
3500 bool ok;
3501 ok = remove_described_reference (n, rdesc);
3502 gcc_checking_assert (ok);
3504 clone = cs->caller;
3505 while (clone->global.inlined_to
3506 && clone != rdesc->cs->caller
3507 && IPA_NODE_REF (clone)->ipcp_orig_node)
3509 struct ipa_ref *ref;
3510 ref = clone->find_reference (n, NULL, 0);
3511 if (ref)
3513 if (dump_file)
3514 fprintf (dump_file, "ipa-prop: Removing "
3515 "cloning-created reference "
3516 "from %s/%i to %s/%i.\n",
3517 xstrdup (clone->name ()),
3518 clone->order,
3519 xstrdup (n->name ()),
3520 n->order);
3521 ref->remove_reference ();
3523 clone = clone->callers->caller;
3530 for (i = ipa_get_param_count (old_root_info);
3531 i < ipa_get_cs_argument_count (args);
3532 i++)
3534 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3536 if (jf->type == IPA_JF_CONST)
3538 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3539 if (rdesc)
3540 rdesc->refcount = IPA_UNDESCRIBED_USE;
3542 else if (jf->type == IPA_JF_PASS_THROUGH)
3543 ipa_set_controlled_uses (new_root_info,
3544 jf->value.pass_through.formal_id,
3545 IPA_UNDESCRIBED_USE);
3549 /* Update jump functions and call note functions on inlining the call site CS.
3550 CS is expected to lead to a node already cloned by
3551 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3552 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3553 created. */
3555 bool
3556 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3557 vec<cgraph_edge *> *new_edges)
3559 bool changed;
3560 /* Do nothing if the preparation phase has not been carried out yet
3561 (i.e. during early inlining). */
3562 if (!ipa_node_params_vector.exists ())
3563 return false;
3564 gcc_assert (ipa_edge_args_vector);
3566 propagate_controlled_uses (cs);
3567 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3569 return changed;
3572 /* Frees all dynamically allocated structures that the argument info points
3573 to. */
3575 void
3576 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3578 vec_free (args->jump_functions);
3579 memset (args, 0, sizeof (*args));
3582 /* Free all ipa_edge structures. */
3584 void
3585 ipa_free_all_edge_args (void)
3587 int i;
3588 struct ipa_edge_args *args;
3590 if (!ipa_edge_args_vector)
3591 return;
3593 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3594 ipa_free_edge_args_substructures (args);
3596 vec_free (ipa_edge_args_vector);
3599 /* Frees all dynamically allocated structures that the param info points
3600 to. */
3602 void
3603 ipa_free_node_params_substructures (struct ipa_node_params *info)
3605 info->descriptors.release ();
3606 free (info->lattices);
3607 /* Lattice values and their sources are deallocated with their allocation
3608 pool. */
3609 info->known_vals.release ();
3610 memset (info, 0, sizeof (*info));
3613 /* Free all ipa_node_params structures. */
3615 void
3616 ipa_free_all_node_params (void)
3618 int i;
3619 struct ipa_node_params *info;
3621 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3622 ipa_free_node_params_substructures (info);
3624 ipa_node_params_vector.release ();
3627 /* Set the aggregate replacements of NODE to be AGGVALS. */
3629 void
3630 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3631 struct ipa_agg_replacement_value *aggvals)
3633 if (vec_safe_length (ipa_node_agg_replacements)
3634 <= (unsigned) symtab->cgraph_max_uid)
3635 vec_safe_grow_cleared (ipa_node_agg_replacements,
3636 symtab->cgraph_max_uid + 1);
3638 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3641 /* Hook that is called by cgraph.c when an edge is removed. */
3643 static void
3644 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3646 struct ipa_edge_args *args;
3648 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3649 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3650 return;
3652 args = IPA_EDGE_REF (cs);
3653 if (args->jump_functions)
3655 struct ipa_jump_func *jf;
3656 int i;
3657 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3659 struct ipa_cst_ref_desc *rdesc;
3660 try_decrement_rdesc_refcount (jf);
3661 if (jf->type == IPA_JF_CONST
3662 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3663 && rdesc->cs == cs)
3664 rdesc->cs = NULL;
3668 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3671 /* Hook that is called by cgraph.c when a node is removed. */
3673 static void
3674 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3676 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3677 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3678 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3679 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3680 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3683 /* Hook that is called by cgraph.c when an edge is duplicated. */
3685 static void
3686 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3687 __attribute__((unused)) void *data)
3689 struct ipa_edge_args *old_args, *new_args;
3690 unsigned int i;
3692 ipa_check_create_edge_args ();
3694 old_args = IPA_EDGE_REF (src);
3695 new_args = IPA_EDGE_REF (dst);
3697 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3698 if (old_args->polymorphic_call_contexts)
3699 new_args->polymorphic_call_contexts
3700 = vec_safe_copy (old_args->polymorphic_call_contexts);
3702 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3704 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3705 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3707 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3709 if (src_jf->type == IPA_JF_CONST)
3711 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3713 if (!src_rdesc)
3714 dst_jf->value.constant.rdesc = NULL;
3715 else if (src->caller == dst->caller)
3717 struct ipa_ref *ref;
3718 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3719 gcc_checking_assert (n);
3720 ref = src->caller->find_reference (n, src->call_stmt,
3721 src->lto_stmt_uid);
3722 gcc_checking_assert (ref);
3723 dst->caller->clone_reference (ref, ref->stmt);
3725 gcc_checking_assert (ipa_refdesc_pool);
3726 struct ipa_cst_ref_desc *dst_rdesc
3727 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3728 dst_rdesc->cs = dst;
3729 dst_rdesc->refcount = src_rdesc->refcount;
3730 dst_rdesc->next_duplicate = NULL;
3731 dst_jf->value.constant.rdesc = dst_rdesc;
3733 else if (src_rdesc->cs == src)
3735 struct ipa_cst_ref_desc *dst_rdesc;
3736 gcc_checking_assert (ipa_refdesc_pool);
3737 dst_rdesc
3738 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3739 dst_rdesc->cs = dst;
3740 dst_rdesc->refcount = src_rdesc->refcount;
3741 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3742 src_rdesc->next_duplicate = dst_rdesc;
3743 dst_jf->value.constant.rdesc = dst_rdesc;
3745 else
3747 struct ipa_cst_ref_desc *dst_rdesc;
3748 /* This can happen during inlining, when a JFUNC can refer to a
3749 reference taken in a function up in the tree of inline clones.
3750 We need to find the duplicate that refers to our tree of
3751 inline clones. */
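/* Search the duplicates for the one whose edge lies in the same tree of
inline clones, i.e. whose caller has the same inline root as DST's caller;
the assert below checks that exactly such an entry was found. */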
3753 gcc_assert (dst->caller->global.inlined_to);
3754 for (dst_rdesc = src_rdesc->next_duplicate;
3755 dst_rdesc;
3756 dst_rdesc = dst_rdesc->next_duplicate)
3758 struct cgraph_node *top;
3759 top = dst_rdesc->cs->caller->global.inlined_to
3760 ? dst_rdesc->cs->caller->global.inlined_to
3761 : dst_rdesc->cs->caller;
3762 if (dst->caller->global.inlined_to == top)
3763 break;
3765 gcc_assert (dst_rdesc);
3766 dst_jf->value.constant.rdesc = dst_rdesc;
3769 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3770 && src->caller == dst->caller)
3772 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3773 ? dst->caller->global.inlined_to : dst->caller;
3774 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3775 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3777 int c = ipa_get_controlled_uses (root_info, idx);
3778 if (c != IPA_UNDESCRIBED_USE)
3780 c++;
3781 ipa_set_controlled_uses (root_info, idx, c);
3787 /* Hook that is called by cgraph.c when a node is duplicated. */
3789 static void
3790 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3791 ATTRIBUTE_UNUSED void *data)
3793 struct ipa_node_params *old_info, *new_info;
3794 struct ipa_agg_replacement_value *old_av, *new_av;
3796 ipa_check_create_node_params ();
3797 old_info = IPA_NODE_REF (src);
3798 new_info = IPA_NODE_REF (dst);
3800 new_info->descriptors = old_info->descriptors.copy ();
3801 new_info->lattices = NULL;
3802 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3804 new_info->analysis_done = old_info->analysis_done;
3805 new_info->node_enqueued = old_info->node_enqueued;
3807 old_av = ipa_get_agg_replacements_for_node (src);
3808 if (!old_av)
3809 return;
3811 new_av = NULL;
3812 while (old_av)
3814 struct ipa_agg_replacement_value *v;
3816 v = ggc_alloc<ipa_agg_replacement_value> ();
3817 memcpy (v, old_av, sizeof (*v));
3818 v->next = new_av;
3819 new_av = v;
3820 old_av = old_av->next;
3822 ipa_set_node_agg_value_chain (dst, new_av);
3826 /* Analyze a function newly added to the callgraph. */
3828 static void
3829 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3831 if (node->has_gimple_body_p ())
3832 ipa_analyze_node (node);
3835 /* Register our cgraph hooks if they are not already there. */
3837 void
3838 ipa_register_cgraph_hooks (void)
3840 if (!edge_removal_hook_holder)
3841 edge_removal_hook_holder =
3842 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3843 if (!node_removal_hook_holder)
3844 node_removal_hook_holder =
3845 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3846 if (!edge_duplication_hook_holder)
3847 edge_duplication_hook_holder =
3848 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3849 if (!node_duplication_hook_holder)
3850 node_duplication_hook_holder =
3851 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3852 function_insertion_hook_holder =
3853 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3856 /* Unregister our cgraph hooks. */
3858 static void
3859 ipa_unregister_cgraph_hooks (void)
3861 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3862 edge_removal_hook_holder = NULL;
3863 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3864 node_removal_hook_holder = NULL;
3865 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3866 edge_duplication_hook_holder = NULL;
3867 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3868 node_duplication_hook_holder = NULL;
3869 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3870 function_insertion_hook_holder = NULL;
3873 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3874 longer needed after ipa-cp. */
3876 void
3877 ipa_free_all_structures_after_ipa_cp (void)
3879 if (!optimize)
3881 ipa_free_all_edge_args ();
3882 ipa_free_all_node_params ();
3883 free_alloc_pool (ipcp_sources_pool);
3884 free_alloc_pool (ipcp_values_pool);
3885 free_alloc_pool (ipcp_agg_lattice_pool);
3886 ipa_unregister_cgraph_hooks ();
3887 if (ipa_refdesc_pool)
3888 free_alloc_pool (ipa_refdesc_pool);
3892 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3893 longer needed after indirect inlining. */
3895 void
3896 ipa_free_all_structures_after_iinln (void)
3898 ipa_free_all_edge_args ();
3899 ipa_free_all_node_params ();
3900 ipa_unregister_cgraph_hooks ();
3901 if (ipcp_sources_pool)
3902 free_alloc_pool (ipcp_sources_pool);
3903 if (ipcp_values_pool)
3904 free_alloc_pool (ipcp_values_pool);
3905 if (ipcp_agg_lattice_pool)
3906 free_alloc_pool (ipcp_agg_lattice_pool);
3907 if (ipa_refdesc_pool)
3908 free_alloc_pool (ipa_refdesc_pool);
3911 /* Print ipa_tree_map data structures of function NODE to F. */
3914 void
3915 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3917 int i, count;
3918 struct ipa_node_params *info;
3920 if (!node->definition)
3921 return;
3922 info = IPA_NODE_REF (node);
3923 fprintf (f, " function %s/%i parameter descriptors:\n",
3924 node->name (), node->order);
3925 count = ipa_get_param_count (info);
3926 for (i = 0; i < count; i++)
3928 int c;
3930 fprintf (f, " ");
3931 ipa_dump_param (f, info, i);
3932 if (ipa_is_param_used (info, i))
3933 fprintf (f, " used");
3934 c = ipa_get_controlled_uses (info, i);
3935 if (c == IPA_UNDESCRIBED_USE)
3936 fprintf (f, " undescribed_use");
3937 else
3938 fprintf (f, " controlled_uses=%i", c);
3939 fprintf (f, "\n");
3943 /* Print ipa_tree_map data structures of all functions in the
3944 callgraph to F. */
3946 void
3947 ipa_print_all_params (FILE * f)
3949 struct cgraph_node *node;
3951 fprintf (f, "\nFunction parameters:\n");
3952 FOR_EACH_FUNCTION (node)
3953 ipa_print_node_params (f, node);
3956 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3958 vec<tree>
3959 ipa_get_vector_of_formal_parms (tree fndecl)
3961 vec<tree> args;
3962 int count;
3963 tree parm;
3965 gcc_assert (!flag_wpa);
3966 count = count_formal_params (fndecl);
3967 args.create (count);
3968 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3969 args.quick_push (parm);
3971 return args;
3974 /* Return a heap-allocated vector containing the types of the formal
3975 parameters of function type FNTYPE. */
3977 vec<tree>
3978 ipa_get_vector_of_formal_parm_types (tree fntype)
3980 vec<tree> types;
3981 int count = 0;
3982 tree t;
3984 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3985 count++;
3987 types.create (count);
3988 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3989 types.quick_push (TREE_VALUE (t));
3991 return types;
3994 /* Modify the function declaration FNDECL and its type according to the plan in
3995 ADJUSTMENTS. It also sets base fields of individual adjustment structures
3996 to reflect the actual parameters being modified, which are determined by the
3997 base_index field. */
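/* In outline: an IPA_PARM_OP_COPY entry re-chains the original PARM_DECL and
keeps its type, an IPA_PARM_OP_REMOVE entry drops the parameter entirely,
and any other operation synthesizes a fresh artificial PARM_DECL, named
after arg_prefix and given the adjusted type (a pointer type when by_ref is
set). */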
3999 void
4000 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4002 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4003 tree orig_type = TREE_TYPE (fndecl);
4004 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4006 /* The following test is an ugly hack; some functions simply don't have any
4007 arguments in their type. This is probably a bug, but well... */
4008 bool care_for_types = (old_arg_types != NULL_TREE);
4009 bool last_parm_void;
4010 vec<tree> otypes;
4011 if (care_for_types)
4013 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4014 == void_type_node);
4015 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4016 if (last_parm_void)
4017 gcc_assert (oparms.length () + 1 == otypes.length ());
4018 else
4019 gcc_assert (oparms.length () == otypes.length ());
4021 else
4023 last_parm_void = false;
4024 otypes.create (0);
4027 int len = adjustments.length ();
4028 tree *link = &DECL_ARGUMENTS (fndecl);
4029 tree new_arg_types = NULL;
4030 for (int i = 0; i < len; i++)
4032 struct ipa_parm_adjustment *adj;
4033 gcc_assert (link);
4035 adj = &adjustments[i];
4036 tree parm;
4037 if (adj->op == IPA_PARM_OP_NEW)
4038 parm = NULL;
4039 else
4040 parm = oparms[adj->base_index];
4041 adj->base = parm;
4043 if (adj->op == IPA_PARM_OP_COPY)
4045 if (care_for_types)
4046 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4047 new_arg_types);
4048 *link = parm;
4049 link = &DECL_CHAIN (parm);
4051 else if (adj->op != IPA_PARM_OP_REMOVE)
4053 tree new_parm;
4054 tree ptype;
4056 if (adj->by_ref)
4057 ptype = build_pointer_type (adj->type);
4058 else
4060 ptype = adj->type;
4061 if (is_gimple_reg_type (ptype))
4063 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4064 if (TYPE_ALIGN (ptype) < malign)
4065 ptype = build_aligned_type (ptype, malign);
4069 if (care_for_types)
4070 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4072 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4073 ptype);
4074 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4075 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4076 DECL_ARTIFICIAL (new_parm) = 1;
4077 DECL_ARG_TYPE (new_parm) = ptype;
4078 DECL_CONTEXT (new_parm) = fndecl;
4079 TREE_USED (new_parm) = 1;
4080 DECL_IGNORED_P (new_parm) = 1;
4081 layout_decl (new_parm, 0);
4083 if (adj->op == IPA_PARM_OP_NEW)
4084 adj->base = NULL;
4085 else
4086 adj->base = parm;
4087 adj->new_decl = new_parm;
4089 *link = new_parm;
4090 link = &DECL_CHAIN (new_parm);
4094 *link = NULL_TREE;
4096 tree new_reversed = NULL;
4097 if (care_for_types)
4099 new_reversed = nreverse (new_arg_types);
4100 if (last_parm_void)
4102 if (new_reversed)
4103 TREE_CHAIN (new_arg_types) = void_list_node;
4104 else
4105 new_reversed = void_list_node;
4109 /* Use copy_node to preserve as much as possible from original type
4110 (debug info, attribute lists etc.)
4111 Exception is METHOD_TYPEs must have THIS argument.
4112 When we are asked to remove it, we need to build new FUNCTION_TYPE
4113 instead. */
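/* Note that in the METHOD_TYPE case below, TYPE_CONTEXT is carried over to
the new FUNCTION_TYPE by hand and DECL_VINDEX is cleared, since without its
THIS argument the declaration can no longer be a virtual method. */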
4114 tree new_type = NULL;
4115 if (TREE_CODE (orig_type) != METHOD_TYPE
4116 || (adjustments[0].op == IPA_PARM_OP_COPY
4117 && adjustments[0].base_index == 0))
4119 new_type = build_distinct_type_copy (orig_type);
4120 TYPE_ARG_TYPES (new_type) = new_reversed;
4122 else
4124 new_type
4125 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4126 new_reversed));
4127 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4128 DECL_VINDEX (fndecl) = NULL_TREE;
4131 /* When the signature changes, we need to clear builtin info. */
4132 if (DECL_BUILT_IN (fndecl))
4134 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4135 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4138 TREE_TYPE (fndecl) = new_type;
4139 DECL_VIRTUAL_P (fndecl) = 0;
4140 DECL_LANG_SPECIFIC (fndecl) = NULL;
4141 otypes.release ();
4142 oparms.release ();
4145 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4146 If this is a directly recursive call, CS must be NULL. Otherwise it must
4147 contain the corresponding call graph edge. */
4149 void
4150 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4151 ipa_parm_adjustment_vec adjustments)
4153 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4154 vec<tree> vargs;
4155 vec<tree, va_gc> **debug_args = NULL;
4156 gimple_call new_stmt;
4157 gimple_stmt_iterator gsi, prev_gsi;
4158 tree callee_decl;
4159 int i, len;
4161 len = adjustments.length ();
4162 vargs.create (len);
4163 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4164 current_node->remove_stmt_references (stmt);
4166 gsi = gsi_for_stmt (stmt);
4167 prev_gsi = gsi;
4168 gsi_prev (&prev_gsi);
4169 for (i = 0; i < len; i++)
4171 struct ipa_parm_adjustment *adj;
4173 adj = &adjustments[i];
4175 if (adj->op == IPA_PARM_OP_COPY)
4177 tree arg = gimple_call_arg (stmt, adj->base_index);
4179 vargs.quick_push (arg);
4181 else if (adj->op != IPA_PARM_OP_REMOVE)
4183 tree expr, base, off;
4184 location_t loc;
4185 unsigned int deref_align = 0;
4186 bool deref_base = false;
4188 /* When we create a new parameter out of the value of the old one, we can
4189 do the following kinds of transformations:
4191 - A scalar passed by reference is converted to a scalar passed by
4192 value. (adj->by_ref is false and the type of the original
4193 actual argument is a pointer to a scalar).
4195 - A part of an aggregate is passed instead of the whole aggregate.
4196 The part can be passed either by value or by reference, this is
4197 determined by value of adj->by_ref. Moreover, the code below
4198 handles both situations when the original aggregate is passed by
4199 value (its type is not a pointer) and when it is passed by
4200 reference (it is a pointer to an aggregate).
4202 When the new argument is passed by reference (adj->by_ref is true)
4203 it must be a part of an aggregate and therefore we form it by
4204 simply taking the address of a reference inside the original
4205 aggregate. */
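/* As a purely hypothetical illustration: for a call foo (&s) where only a
scalar member at some byte offset survives, the new argument is built as a
MEM_REF load of that member when passed by value, or as the address of such
a MEM_REF when the new argument is passed by reference. */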
4207 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4208 base = gimple_call_arg (stmt, adj->base_index);
4209 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4210 : EXPR_LOCATION (base);
4212 if (TREE_CODE (base) != ADDR_EXPR
4213 && POINTER_TYPE_P (TREE_TYPE (base)))
4214 off = build_int_cst (adj->alias_ptr_type,
4215 adj->offset / BITS_PER_UNIT);
4216 else
4218 HOST_WIDE_INT base_offset;
4219 tree prev_base;
4220 bool addrof;
4222 if (TREE_CODE (base) == ADDR_EXPR)
4224 base = TREE_OPERAND (base, 0);
4225 addrof = true;
4227 else
4228 addrof = false;
4229 prev_base = base;
4230 base = get_addr_base_and_unit_offset (base, &base_offset);
4231 /* Aggregate arguments can have non-invariant addresses. */
4232 if (!base)
4234 base = build_fold_addr_expr (prev_base);
4235 off = build_int_cst (adj->alias_ptr_type,
4236 adj->offset / BITS_PER_UNIT);
4238 else if (TREE_CODE (base) == MEM_REF)
4240 if (!addrof)
4242 deref_base = true;
4243 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4245 off = build_int_cst (adj->alias_ptr_type,
4246 base_offset
4247 + adj->offset / BITS_PER_UNIT);
4248 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4249 off);
4250 base = TREE_OPERAND (base, 0);
4252 else
4254 off = build_int_cst (adj->alias_ptr_type,
4255 base_offset
4256 + adj->offset / BITS_PER_UNIT);
4257 base = build_fold_addr_expr (base);
4261 if (!adj->by_ref)
4263 tree type = adj->type;
4264 unsigned int align;
4265 unsigned HOST_WIDE_INT misalign;
4267 if (deref_base)
4269 align = deref_align;
4270 misalign = 0;
4272 else
4274 get_pointer_alignment_1 (base, &align, &misalign);
4275 if (TYPE_ALIGN (type) > align)
4276 align = TYPE_ALIGN (type);
4278 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4279 * BITS_PER_UNIT);
4280 misalign = misalign & (align - 1);
4281 if (misalign != 0)
4282 align = (misalign & -misalign);
4283 if (align < TYPE_ALIGN (type))
4284 type = build_aligned_type (type, align);
4285 base = force_gimple_operand_gsi (&gsi, base,
4286 true, NULL, true, GSI_SAME_STMT);
4287 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4288 /* If expr is not a valid gimple call argument, emit
4289 a load into a temporary. */
4290 if (is_gimple_reg_type (TREE_TYPE (expr)))
4292 gimple tem = gimple_build_assign (NULL_TREE, expr);
4293 if (gimple_in_ssa_p (cfun))
4295 gimple_set_vuse (tem, gimple_vuse (stmt));
4296 expr = make_ssa_name (TREE_TYPE (expr), tem);
4298 else
4299 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4300 gimple_assign_set_lhs (tem, expr);
4301 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4304 else
4306 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4307 expr = build_fold_addr_expr (expr);
4308 expr = force_gimple_operand_gsi (&gsi, expr,
4309 true, NULL, true, GSI_SAME_STMT);
4311 vargs.quick_push (expr);
4313 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4315 unsigned int ix;
4316 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4317 gimple def_temp;
4319 arg = gimple_call_arg (stmt, adj->base_index);
4320 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4322 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4323 continue;
4324 arg = fold_convert_loc (gimple_location (stmt),
4325 TREE_TYPE (origin), arg);
4327 if (debug_args == NULL)
4328 debug_args = decl_debug_args_insert (callee_decl);
4329 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4330 if (ddecl == origin)
4332 ddecl = (**debug_args)[ix + 1];
4333 break;
4335 if (ddecl == NULL)
4337 ddecl = make_node (DEBUG_EXPR_DECL);
4338 DECL_ARTIFICIAL (ddecl) = 1;
4339 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4340 DECL_MODE (ddecl) = DECL_MODE (origin);
4342 vec_safe_push (*debug_args, origin);
4343 vec_safe_push (*debug_args, ddecl);
4345 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4346 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4350 if (dump_file && (dump_flags & TDF_DETAILS))
4352 fprintf (dump_file, "replacing stmt:");
4353 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4356 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4357 vargs.release ();
4358 if (gimple_call_lhs (stmt))
4359 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4361 gimple_set_block (new_stmt, gimple_block (stmt));
4362 if (gimple_has_location (stmt))
4363 gimple_set_location (new_stmt, gimple_location (stmt));
4364 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4365 gimple_call_copy_flags (new_stmt, stmt);
4366 if (gimple_in_ssa_p (cfun))
4368 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4369 if (gimple_vdef (stmt))
4371 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4372 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4376 if (dump_file && (dump_flags & TDF_DETAILS))
4378 fprintf (dump_file, "with stmt:");
4379 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4380 fprintf (dump_file, "\n");
4382 gsi_replace (&gsi, new_stmt, true);
4383 if (cs)
4384 cs->set_call_stmt (new_stmt);
4387 current_node->record_stmt_references (gsi_stmt (gsi));
4388 gsi_prev (&gsi);
4390 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4393 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4394 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4395 specifies whether the function should care about type incompatibility between
4396 the current and new expressions. If it is false, the function will leave
4397 incompatibility issues to the caller. Return true iff the expression
4398 was modified. */
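/* For instance, an access to a reduced parameter is rewritten to the new
PARM_DECL directly, or to a dereference of it when the reduction is passed
by reference; a VIEW_CONVERT_EXPR is wrapped around the replacement when
CONVERT is set and the types are not compatible. */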
4400 bool
4401 ipa_modify_expr (tree *expr, bool convert,
4402 ipa_parm_adjustment_vec adjustments)
4404 struct ipa_parm_adjustment *cand
4405 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4406 if (!cand)
4407 return false;
4409 tree src;
4410 if (cand->by_ref)
4411 src = build_simple_mem_ref (cand->new_decl);
4412 else
4413 src = cand->new_decl;
4415 if (dump_file && (dump_flags & TDF_DETAILS))
4417 fprintf (dump_file, "About to replace expr ");
4418 print_generic_expr (dump_file, *expr, 0);
4419 fprintf (dump_file, " with ");
4420 print_generic_expr (dump_file, src, 0);
4421 fprintf (dump_file, "\n");
4424 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4426 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4427 *expr = vce;
4429 else
4430 *expr = src;
4431 return true;
4434 /* If T is an SSA_NAME, return NULL if it is not a default def or
4435 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4436 the base variable is always returned, regardless of whether it is a default
4437 def. Return T if it is not an SSA_NAME. */
4439 static tree
4440 get_ssa_base_param (tree t, bool ignore_default_def)
4442 if (TREE_CODE (t) == SSA_NAME)
4444 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4445 return SSA_NAME_VAR (t);
4446 else
4447 return NULL_TREE;
4449 return t;
4452 /* Given an expression, return an adjustment entry specifying the
4453 transformation to be done on EXPR. If no suitable adjustment entry
4454 was found, returns NULL.
4456 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4457 default def, otherwise bail on them.
4459 If CONVERT is non-NULL, this function will set *CONVERT if the
4460 expression provided is a component reference. ADJUSTMENTS is the
4461 adjustments vector. */
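/* The expression is first stripped of any BIT_FIELD_REF, IMAGPART_EXPR or
REALPART_EXPR wrapper (which sets *CONVERT), and is then matched against
the adjustments by its base PARM_DECL and bit offset. */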
4463 ipa_parm_adjustment *
4464 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4465 ipa_parm_adjustment_vec adjustments,
4466 bool ignore_default_def)
4468 if (TREE_CODE (**expr) == BIT_FIELD_REF
4469 || TREE_CODE (**expr) == IMAGPART_EXPR
4470 || TREE_CODE (**expr) == REALPART_EXPR)
4472 *expr = &TREE_OPERAND (**expr, 0);
4473 if (convert)
4474 *convert = true;
4477 HOST_WIDE_INT offset, size, max_size;
4478 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4479 if (!base || size == -1 || max_size == -1)
4480 return NULL;
4482 if (TREE_CODE (base) == MEM_REF)
4484 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4485 base = TREE_OPERAND (base, 0);
4488 base = get_ssa_base_param (base, ignore_default_def);
4489 if (!base || TREE_CODE (base) != PARM_DECL)
4490 return NULL;
4492 struct ipa_parm_adjustment *cand = NULL;
4493 unsigned int len = adjustments.length ();
4494 for (unsigned i = 0; i < len; i++)
4496 struct ipa_parm_adjustment *adj = &adjustments[i];
4498 if (adj->base == base
4499 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4501 cand = adj;
4502 break;
4506 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4507 return NULL;
4508 return cand;
4511 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4513 static bool
4514 index_in_adjustments_multiple_times_p (int base_index,
4515 ipa_parm_adjustment_vec adjustments)
4517 int i, len = adjustments.length ();
4518 bool one = false;
4520 for (i = 0; i < len; i++)
4522 struct ipa_parm_adjustment *adj;
4523 adj = &adjustments[i];
4525 if (adj->base_index == base_index)
4527 if (one)
4528 return true;
4529 else
4530 one = true;
4533 return false;
4537 /* Return adjustments that should have the same effect on function parameters
4538 and call arguments as if they were first changed according to adjustments in
4539 INNER and then by adjustments in OUTER. */
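/* E.g., stacking two non-copy reductions composes their offsets: the
combined entry's offset is the sum of the inner and outer offsets, while a
copy on either side simply preserves the other side's offset. */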
4541 ipa_parm_adjustment_vec
4542 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4543 ipa_parm_adjustment_vec outer)
4545 int i, outlen = outer.length ();
4546 int inlen = inner.length ();
4547 int removals = 0;
4548 ipa_parm_adjustment_vec adjustments, tmp;
4550 tmp.create (inlen);
4551 for (i = 0; i < inlen; i++)
4553 struct ipa_parm_adjustment *n;
4554 n = &inner[i];
4556 if (n->op == IPA_PARM_OP_REMOVE)
4557 removals++;
4558 else
4560 /* FIXME: Handling of new arguments is not implemented yet. */
4561 gcc_assert (n->op != IPA_PARM_OP_NEW);
4562 tmp.quick_push (*n);
4566 adjustments.create (outlen + removals);
4567 for (i = 0; i < outlen; i++)
4569 struct ipa_parm_adjustment r;
4570 struct ipa_parm_adjustment *out = &outer[i];
4571 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4573 memset (&r, 0, sizeof (r));
4574 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4575 if (out->op == IPA_PARM_OP_REMOVE)
4577 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4579 r.op = IPA_PARM_OP_REMOVE;
4580 adjustments.quick_push (r);
4582 continue;
4584 else
4586 /* FIXME: Handling of new arguments is not implemented yet. */
4587 gcc_assert (out->op != IPA_PARM_OP_NEW);
4590 r.base_index = in->base_index;
4591 r.type = out->type;
4593 /* FIXME: Create nonlocal value too. */
4595 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4596 r.op = IPA_PARM_OP_COPY;
4597 else if (in->op == IPA_PARM_OP_COPY)
4598 r.offset = out->offset;
4599 else if (out->op == IPA_PARM_OP_COPY)
4600 r.offset = in->offset;
4601 else
4602 r.offset = in->offset + out->offset;
4603 adjustments.quick_push (r);
4606 for (i = 0; i < inlen; i++)
4608 struct ipa_parm_adjustment *n = &inner[i];
4610 if (n->op == IPA_PARM_OP_REMOVE)
4611 adjustments.quick_push (*n);
4614 tmp.release ();
4615 return adjustments;
4618 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4619 human-friendly way, assuming they are meant to be applied to FNDECL. */
4621 void
4622 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4623 tree fndecl)
4625 int i, len = adjustments.length ();
4626 bool first = true;
4627 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4629 fprintf (file, "IPA param adjustments: ");
4630 for (i = 0; i < len; i++)
4632 struct ipa_parm_adjustment *adj;
4633 adj = &adjustments[i];
4635 if (!first)
4636 fprintf (file, " ");
4637 else
4638 first = false;
4640 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4641 print_generic_expr (file, parms[adj->base_index], 0);
4642 if (adj->base)
4644 fprintf (file, ", base: ");
4645 print_generic_expr (file, adj->base, 0);
4647 if (adj->new_decl)
4649 fprintf (file, ", new_decl: ");
4650 print_generic_expr (file, adj->new_decl, 0);
4652 if (adj->new_ssa_base)
4654 fprintf (file, ", new_ssa_base: ");
4655 print_generic_expr (file, adj->new_ssa_base, 0);
4658 if (adj->op == IPA_PARM_OP_COPY)
4659 fprintf (file, ", copy_param");
4660 else if (adj->op == IPA_PARM_OP_REMOVE)
4661 fprintf (file, ", remove_param");
4662 else
4663 fprintf (file, ", offset %li", (long) adj->offset);
4664 if (adj->by_ref)
4665 fprintf (file, ", by_ref");
4666 print_node_brief (file, ", type: ", adj->type, 0);
4667 fprintf (file, "\n");
4669 parms.release ();
4672 /* Dump the linked list of aggregate replacement values AV to F. */
4674 void
4675 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4677 bool comma = false;
4678 fprintf (f, " Aggregate replacements:");
4679 for (; av; av = av->next)
4681 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4682 av->index, av->offset);
4683 print_generic_expr (f, av->value, 0);
4684 comma = true;
4686 fprintf (f, "\n");
4689 /* Stream out jump function JUMP_FUNC to OB. */
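/* The layout written below is: the jump function type, a type-specific
payload, the number of aggregate items, a by_ref bitpack when that count is
nonzero, and finally an (offset, value) pair per item;
ipa_read_jump_function must mirror it exactly. */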
4691 static void
4692 ipa_write_jump_function (struct output_block *ob,
4693 struct ipa_jump_func *jump_func)
4695 struct ipa_agg_jf_item *item;
4696 struct bitpack_d bp;
4697 int i, count;
4699 streamer_write_uhwi (ob, jump_func->type);
4700 switch (jump_func->type)
4702 case IPA_JF_UNKNOWN:
4703 break;
4704 case IPA_JF_KNOWN_TYPE:
4705 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4706 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4707 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4708 break;
4709 case IPA_JF_CONST:
4710 gcc_assert (
4711 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4712 stream_write_tree (ob, jump_func->value.constant.value, true);
4713 break;
4714 case IPA_JF_PASS_THROUGH:
4715 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4716 if (jump_func->value.pass_through.operation == NOP_EXPR)
4718 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4719 bp = bitpack_create (ob->main_stream);
4720 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4721 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4722 streamer_write_bitpack (&bp);
4724 else
4726 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4727 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4729 break;
4730 case IPA_JF_ANCESTOR:
4731 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4732 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4733 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4734 bp = bitpack_create (ob->main_stream);
4735 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4736 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4737 streamer_write_bitpack (&bp);
4738 break;
4741 count = vec_safe_length (jump_func->agg.items);
4742 streamer_write_uhwi (ob, count);
4743 if (count)
4745 bp = bitpack_create (ob->main_stream);
4746 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4747 streamer_write_bitpack (&bp);
4750 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4752 streamer_write_uhwi (ob, item->offset);
4753 stream_write_tree (ob, item->value, true);
4757 /* Read in jump function JUMP_FUNC from IB. */
4759 static void
4760 ipa_read_jump_function (struct lto_input_block *ib,
4761 struct ipa_jump_func *jump_func,
4762 struct cgraph_edge *cs,
4763 struct data_in *data_in)
4765 enum jump_func_type jftype;
4766 enum tree_code operation;
4767 int i, count;
4769 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4770 switch (jftype)
4772 case IPA_JF_UNKNOWN:
4773 jump_func->type = IPA_JF_UNKNOWN;
4774 break;
4775 case IPA_JF_KNOWN_TYPE:
4777 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4778 tree base_type = stream_read_tree (ib, data_in);
4779 tree component_type = stream_read_tree (ib, data_in);
4781 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4782 break;
4784 case IPA_JF_CONST:
4785 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4786 break;
4787 case IPA_JF_PASS_THROUGH:
4788 operation = (enum tree_code) streamer_read_uhwi (ib);
4789 if (operation == NOP_EXPR)
4791 int formal_id = streamer_read_uhwi (ib);
4792 struct bitpack_d bp = streamer_read_bitpack (ib);
4793 bool agg_preserved = bp_unpack_value (&bp, 1);
4794 bool type_preserved = bp_unpack_value (&bp, 1);
4795 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4796 type_preserved);
4798 else
4800 tree operand = stream_read_tree (ib, data_in);
4801 int formal_id = streamer_read_uhwi (ib);
4802 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4803 operation);
4805 break;
4806 case IPA_JF_ANCESTOR:
4808 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4809 tree type = stream_read_tree (ib, data_in);
4810 int formal_id = streamer_read_uhwi (ib);
4811 struct bitpack_d bp = streamer_read_bitpack (ib);
4812 bool agg_preserved = bp_unpack_value (&bp, 1);
4813 bool type_preserved = bp_unpack_value (&bp, 1);
4815 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4816 type_preserved);
4817 break;
4821 count = streamer_read_uhwi (ib);
4822 vec_alloc (jump_func->agg.items, count);
4823 if (count)
4825 struct bitpack_d bp = streamer_read_bitpack (ib);
4826 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4828 for (i = 0; i < count; i++)
4830 struct ipa_agg_jf_item item;
4831 item.offset = streamer_read_uhwi (ib);
4832 item.value = stream_read_tree (ib, data_in);
4833 jump_func->agg.items->quick_push (item);
4837 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4838 relevant to indirect inlining to OB. */
4840 static void
4841 ipa_write_indirect_edge_info (struct output_block *ob,
4842 struct cgraph_edge *cs)
4844 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4845 struct bitpack_d bp;
4847 streamer_write_hwi (ob, ii->param_index);
4848 bp = bitpack_create (ob->main_stream);
4849 bp_pack_value (&bp, ii->polymorphic, 1);
4850 bp_pack_value (&bp, ii->agg_contents, 1);
4851 bp_pack_value (&bp, ii->member_ptr, 1);
4852 bp_pack_value (&bp, ii->by_ref, 1);
4853 bp_pack_value (&bp, ii->vptr_changed, 1);
4854 streamer_write_bitpack (&bp);
4855 if (ii->agg_contents || ii->polymorphic)
4856 streamer_write_hwi (ob, ii->offset);
4857 else
4858 gcc_assert (ii->offset == 0);
4860 if (ii->polymorphic)
4862 streamer_write_hwi (ob, ii->otr_token);
4863 stream_write_tree (ob, ii->otr_type, true);
4864 ii->context.stream_out (ob);
4868 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4869 relevant to indirect inlining from IB. */
4871 static void
4872 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4873 struct data_in *data_in,
4874 struct cgraph_edge *cs)
4876 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4877 struct bitpack_d bp;
4879 ii->param_index = (int) streamer_read_hwi (ib);
4880 bp = streamer_read_bitpack (ib);
4881 ii->polymorphic = bp_unpack_value (&bp, 1);
4882 ii->agg_contents = bp_unpack_value (&bp, 1);
4883 ii->member_ptr = bp_unpack_value (&bp, 1);
4884 ii->by_ref = bp_unpack_value (&bp, 1);
4885 ii->vptr_changed = bp_unpack_value (&bp, 1);
4886 if (ii->agg_contents || ii->polymorphic)
4887 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4888 else
4889 ii->offset = 0;
4890 if (ii->polymorphic)
4892 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4893 ii->otr_type = stream_read_tree (ib, data_in);
4894 ii->context.stream_in (ib, data_in);
4898 /* Stream out NODE info to OB. */
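/* The per-edge count streamed below is twice the argument count, plus one
when polymorphic call contexts are present; ipa_read_node_info recovers the
flag from the low bit and halves the rest. */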
4900 static void
4901 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4903 int node_ref;
4904 lto_symtab_encoder_t encoder;
4905 struct ipa_node_params *info = IPA_NODE_REF (node);
4906 int j;
4907 struct cgraph_edge *e;
4908 struct bitpack_d bp;
4910 encoder = ob->decl_state->symtab_node_encoder;
4911 node_ref = lto_symtab_encoder_encode (encoder, node);
4912 streamer_write_uhwi (ob, node_ref);
4914 streamer_write_uhwi (ob, ipa_get_param_count (info));
4915 for (j = 0; j < ipa_get_param_count (info); j++)
4916 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4917 bp = bitpack_create (ob->main_stream);
4918 gcc_assert (info->analysis_done
4919 || ipa_get_param_count (info) == 0);
4920 gcc_assert (!info->node_enqueued);
4921 gcc_assert (!info->ipcp_orig_node);
4922 for (j = 0; j < ipa_get_param_count (info); j++)
4923 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4924 streamer_write_bitpack (&bp);
4925 for (j = 0; j < ipa_get_param_count (info); j++)
4926 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4927 for (e = node->callees; e; e = e->next_callee)
4929 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4931 streamer_write_uhwi (ob,
4932 ipa_get_cs_argument_count (args) * 2
4933 + (args->polymorphic_call_contexts != NULL));
4934 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4936 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4937 if (args->polymorphic_call_contexts != NULL)
4938 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4941 for (e = node->indirect_calls; e; e = e->next_callee)
4943 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4945 streamer_write_uhwi (ob,
4946 ipa_get_cs_argument_count (args) * 2
4947 + (args->polymorphic_call_contexts != NULL));
4948 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4950 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4951 if (args->polymorphic_call_contexts != NULL)
4952 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4954 ipa_write_indirect_edge_info (ob, e);
4958 /* Stream in NODE info from IB. */
4960 static void
4961 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4962 struct data_in *data_in)
4964 struct ipa_node_params *info = IPA_NODE_REF (node);
4965 int k;
4966 struct cgraph_edge *e;
4967 struct bitpack_d bp;
4969 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4971 for (k = 0; k < ipa_get_param_count (info); k++)
4972 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4974 bp = streamer_read_bitpack (ib);
4975 if (ipa_get_param_count (info) != 0)
4976 info->analysis_done = true;
4977 info->node_enqueued = false;
4978 for (k = 0; k < ipa_get_param_count (info); k++)
4979 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4980 for (k = 0; k < ipa_get_param_count (info); k++)
4981 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4982 for (e = node->callees; e; e = e->next_callee)
4984 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4985 int count = streamer_read_uhwi (ib);
4986 bool contexts_computed = count & 1;
4987 count /= 2;
4989 if (!count)
4990 continue;
4991 vec_safe_grow_cleared (args->jump_functions, count);
4992 if (contexts_computed)
4993 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4995 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4997 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4998 data_in);
4999 if (contexts_computed)
5000 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5003 for (e = node->indirect_calls; e; e = e->next_callee)
5005 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5006 int count = streamer_read_uhwi (ib);
5007 bool contexts_computed = count & 1;
5008 count /= 2;
5010 if (count)
5012 vec_safe_grow_cleared (args->jump_functions, count);
5013 if (contexts_computed)
5014 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5015 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5017 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5018 data_in);
5019 if (contexts_computed)
5020 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5023 ipa_read_indirect_edge_info (ib, data_in, e);
5027 /* Write jump functions for all functions in the current partition. */
5029 void
5030 ipa_prop_write_jump_functions (void)
5032 struct cgraph_node *node;
5033 struct output_block *ob;
5034 unsigned int count = 0;
5035 lto_symtab_encoder_iterator lsei;
5036 lto_symtab_encoder_t encoder;
5039 if (!ipa_node_params_vector.exists ())
5040 return;
5042 ob = create_output_block (LTO_section_jump_functions);
5043 encoder = ob->decl_state->symtab_node_encoder;
5044 ob->symbol = NULL;
5045 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5046 lsei_next_function_in_partition (&lsei))
5048 node = lsei_cgraph_node (lsei);
5049 if (node->has_gimple_body_p ()
5050 && IPA_NODE_REF (node) != NULL)
5051 count++;
5054 streamer_write_uhwi (ob, count);
5056 /* Process all of the functions. */
5057 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5058 lsei_next_function_in_partition (&lsei))
5060 node = lsei_cgraph_node (lsei);
5061 if (node->has_gimple_body_p ()
5062 && IPA_NODE_REF (node) != NULL)
5063 ipa_write_node_info (ob, node);
5065 streamer_write_char_stream (ob->main_stream, 0);
5066 produce_asm (ob, NULL);
5067 destroy_output_block (ob);
5070 /* Read the jump-functions section in file FILE_DATA of length LEN with data DATA. */
5072 static void
5073 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5074 size_t len)
5076 const struct lto_function_header *header =
5077 (const struct lto_function_header *) data;
5078 const int cfg_offset = sizeof (struct lto_function_header);
5079 const int main_offset = cfg_offset + header->cfg_size;
5080 const int string_offset = main_offset + header->main_size;
5081 struct data_in *data_in;
5082 unsigned int i;
5083 unsigned int count;
5085 lto_input_block ib_main ((const char *) data + main_offset,
5086 header->main_size);
5088 data_in =
5089 lto_data_in_create (file_data, (const char *) data + string_offset,
5090 header->string_size, vNULL);
5091 count = streamer_read_uhwi (&ib_main);
5093 for (i = 0; i < count; i++)
5095 unsigned int index;
5096 struct cgraph_node *node;
5097 lto_symtab_encoder_t encoder;
5099 index = streamer_read_uhwi (&ib_main);
5100 encoder = file_data->symtab_node_encoder;
5101 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5102 index));
5103 gcc_assert (node->definition);
5104 ipa_read_node_info (&ib_main, node, data_in);
5106 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5107 len);
5108 lto_data_in_delete (data_in);
5111 /* Read ipcp jump functions. */
5113 void
5114 ipa_prop_read_jump_functions (void)
5116 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5117 struct lto_file_decl_data *file_data;
5118 unsigned int j = 0;
5120 ipa_check_create_node_params ();
5121 ipa_check_create_edge_args ();
5122 ipa_register_cgraph_hooks ();
5124 while ((file_data = file_data_vec[j++]))
5126 size_t len;
5127 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5129 if (data)
5130 ipa_prop_read_section (file_data, data, len);
5134 /* After merging units, we can get mismatches in argument counts.
5135 Decl merging might also have rendered parameter lists obsolete.
5136 Also compute called_with_variable_arg info. */
5138 void
5139 ipa_update_after_lto_read (void)
5141 ipa_check_create_node_params ();
5142 ipa_check_create_edge_args ();
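/* Stream out the aggregate value replacement chain for NODE to OB. */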
5145 void
5146 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5148 int node_ref;
5149 unsigned int count = 0;
5150 lto_symtab_encoder_t encoder;
5151 struct ipa_agg_replacement_value *aggvals, *av;
5153 aggvals = ipa_get_agg_replacements_for_node (node);
5154 encoder = ob->decl_state->symtab_node_encoder;
5155 node_ref = lto_symtab_encoder_encode (encoder, node);
5156 streamer_write_uhwi (ob, node_ref);
5158 for (av = aggvals; av; av = av->next)
5159 count++;
5160 streamer_write_uhwi (ob, count);
5162 for (av = aggvals; av; av = av->next)
5164 struct bitpack_d bp;
5166 streamer_write_uhwi (ob, av->offset);
5167 streamer_write_uhwi (ob, av->index);
5168 stream_write_tree (ob, av->value, true);
5170 bp = bitpack_create (ob->main_stream);
5171 bp_pack_value (&bp, av->by_ref, 1);
5172 streamer_write_bitpack (&bp);
5176 /* Stream in the aggregate value replacement chain for NODE from IB. */
5178 static void
5179 read_agg_replacement_chain (struct lto_input_block *ib,
5180 struct cgraph_node *node,
5181 struct data_in *data_in)
5183 struct ipa_agg_replacement_value *aggvals = NULL;
5184 unsigned int count, i;
5186 count = streamer_read_uhwi (ib);
5187 for (i = 0; i < count; i++)
5189 struct ipa_agg_replacement_value *av;
5190 struct bitpack_d bp;
5192 av = ggc_alloc<ipa_agg_replacement_value> ();
5193 av->offset = streamer_read_uhwi (ib);
5194 av->index = streamer_read_uhwi (ib);
5195 av->value = stream_read_tree (ib, data_in);
5196 bp = streamer_read_bitpack (ib);
5197 av->by_ref = bp_unpack_value (&bp, 1);
5198 av->next = aggvals;
5199 aggvals = av;
5201 ipa_set_node_agg_value_chain (node, aggvals);
5204 /* Write all aggregate replacements for nodes in the current partition. */
5206 void
5207 ipa_prop_write_all_agg_replacement (void)
5209 struct cgraph_node *node;
5210 struct output_block *ob;
5211 unsigned int count = 0;
5212 lto_symtab_encoder_iterator lsei;
5213 lto_symtab_encoder_t encoder;
5215 if (!ipa_node_agg_replacements)
5216 return;
5218 ob = create_output_block (LTO_section_ipcp_transform);
5219 encoder = ob->decl_state->symtab_node_encoder;
5220 ob->symbol = NULL;
5221 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5222 lsei_next_function_in_partition (&lsei))
5224 node = lsei_cgraph_node (lsei);
5225 if (node->has_gimple_body_p ()
5226 && ipa_get_agg_replacements_for_node (node) != NULL)
5227 count++;
5230 streamer_write_uhwi (ob, count);
5232 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5233 lsei_next_function_in_partition (&lsei))
5235 node = lsei_cgraph_node (lsei);
5236 if (node->has_gimple_body_p ()
5237 && ipa_get_agg_replacements_for_node (node) != NULL)
5238 write_agg_replacement_chain (ob, node);
5240 streamer_write_char_stream (ob->main_stream, 0);
5241 produce_asm (ob, NULL);
5242 destroy_output_block (ob);
5245 /* Read the aggregate-replacements section in file FILE_DATA of length LEN
5246 with data DATA. */
5248 static void
5249 read_replacements_section (struct lto_file_decl_data *file_data,
5250 const char *data,
5251 size_t len)
5253 const struct lto_function_header *header =
5254 (const struct lto_function_header *) data;
5255 const int cfg_offset = sizeof (struct lto_function_header);
5256 const int main_offset = cfg_offset + header->cfg_size;
5257 const int string_offset = main_offset + header->main_size;
5258 struct data_in *data_in;
5259 unsigned int i;
5260 unsigned int count;
5262 lto_input_block ib_main ((const char *) data + main_offset,
5263 header->main_size);
5265 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5266 header->string_size, vNULL);
5267 count = streamer_read_uhwi (&ib_main);
5269 for (i = 0; i < count; i++)
5271 unsigned int index;
5272 struct cgraph_node *node;
5273 lto_symtab_encoder_t encoder;
5275 index = streamer_read_uhwi (&ib_main);
5276 encoder = file_data->symtab_node_encoder;
5277 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5278 index));
5279 gcc_assert (node->definition);
5280 read_agg_replacement_chain (&ib_main, node, data_in);
5282 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5283 len);
5284 lto_data_in_delete (data_in);
5287 /* Read IPA-CP aggregate replacements. */
5289 void
5290 ipa_prop_read_all_agg_replacement (void)
5292 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5293 struct lto_file_decl_data *file_data;
5294 unsigned int j = 0;
5296 while ((file_data = file_data_vec[j++]))
5298 size_t len;
5299 const char *data = lto_get_section_data (file_data,
5300 LTO_section_ipcp_transform,
5301 NULL, &len);
5302 if (data)
5303 read_replacements_section (file_data, data, len);
5307 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5308 NODE. */
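/* For example, with combined_args_to_skip = {1}, the original indices
0, 2 and 3 are remapped to 0, 1 and 2 respectively. */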
5310 static void
5311 adjust_agg_replacement_values (struct cgraph_node *node,
5312 struct ipa_agg_replacement_value *aggval)
5314 struct ipa_agg_replacement_value *v;
5315 int i, c = 0, d = 0, *adj;
5317 if (!node->clone.combined_args_to_skip)
5318 return;
5320 for (v = aggval; v; v = v->next)
5322 gcc_assert (v->index >= 0);
5323 if (c < v->index)
5324 c = v->index;
5326 c++;
5328 adj = XALLOCAVEC (int, c);
5329 for (i = 0; i < c; i++)
5330 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5332 adj[i] = -1;
5333 d++;
5335 else
5336 adj[i] = i - d;
5338 for (v = aggval; v; v = v->next)
5339 v->index = adj[v->index];
5342 /* Dominator walker driving the ipcp modification phase. */
5344 class ipcp_modif_dom_walker : public dom_walker
5346 public:
5347 ipcp_modif_dom_walker (struct func_body_info *fbi,
5348 vec<ipa_param_descriptor> descs,
5349 struct ipa_agg_replacement_value *av,
5350 bool *sc, bool *cc)
5351 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5352 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5354 virtual void before_dom_children (basic_block);
5356 private:
5357 struct func_body_info *m_fbi;
5358 vec<ipa_param_descriptor> m_descriptors;
5359 struct ipa_agg_replacement_value *m_aggval;
5360 bool *m_something_changed, *m_cfg_changed;
5363 void
5364 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5366 gimple_stmt_iterator gsi;
5367 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5369 struct ipa_agg_replacement_value *v;
5370 gimple stmt = gsi_stmt (gsi);
5371 tree rhs, val, t;
5372 HOST_WIDE_INT offset, size;
5373 int index;
5374 bool by_ref, vce;
5376 if (!gimple_assign_load_p (stmt))
5377 continue;
5378 rhs = gimple_assign_rhs1 (stmt);
5379 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5380 continue;
5382 vce = false;
5383 t = rhs;
5384 while (handled_component_p (t))
5386 /* V_C_E can do things like convert an array of integers to one
5387 bigger integer and similar things we do not handle below. */
5388 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5390 vce = true;
5391 break;
5393 t = TREE_OPERAND (t, 0);
5395 if (vce)
5396 continue;
5398 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5399 &offset, &size, &by_ref))
5400 continue;
5401 for (v = m_aggval; v; v = v->next)
5402 if (v->index == index
5403 && v->offset == offset)
5404 break;
5405 if (!v
5406 || v->by_ref != by_ref
5407 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5408 continue;
5410 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5411 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5413 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5414 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5415 else if (TYPE_SIZE (TREE_TYPE (rhs))
5416 == TYPE_SIZE (TREE_TYPE (v->value)))
5417 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5418 else
5420 if (dump_file)
5422 fprintf (dump_file, " const ");
5423 print_generic_expr (dump_file, v->value, 0);
5424 fprintf (dump_file, " can't be converted to type of ");
5425 print_generic_expr (dump_file, rhs, 0);
5426 fprintf (dump_file, "\n");
5428 continue;
5431 else
5432 val = v->value;
5434 if (dump_file && (dump_flags & TDF_DETAILS))
5436 fprintf (dump_file, "Modifying stmt:\n ");
5437 print_gimple_stmt (dump_file, stmt, 0, 0);
5439 gimple_assign_set_rhs_from_tree (&gsi, val);
5440 update_stmt (stmt);
5442 if (dump_file && (dump_flags & TDF_DETAILS))
5444 fprintf (dump_file, "into:\n ");
5445 print_gimple_stmt (dump_file, stmt, 0, 0);
5446 fprintf (dump_file, "\n");
5449 *m_something_changed = true;
5450 if (maybe_clean_eh_stmt (stmt)
5451 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5452 *m_cfg_changed = true;
5457 /* IPCP transformation phase doing propagation of aggregate values. */
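/* In outline: look up the aggregate replacements recorded for NODE, remap
their indices past any arguments the clone skips, then walk the dominator
tree replacing loads from the described aggregate slots with the known
constant values. */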
5459 unsigned int
5460 ipcp_transform_function (struct cgraph_node *node)
5462 vec<ipa_param_descriptor> descriptors = vNULL;
5463 struct func_body_info fbi;
5464 struct ipa_agg_replacement_value *aggval;
5465 int param_count;
5466 bool cfg_changed = false, something_changed = false;
5468 gcc_checking_assert (cfun);
5469 gcc_checking_assert (current_function_decl);
5471 if (dump_file)
5472 fprintf (dump_file, "Modification phase of node %s/%i\n",
5473 node->name (), node->order);
5475 aggval = ipa_get_agg_replacements_for_node (node);
5476 if (!aggval)
5477 return 0;
5478 param_count = count_formal_params (node->decl);
5479 if (param_count == 0)
5480 return 0;
5481 adjust_agg_replacement_values (node, aggval);
5482 if (dump_file)
5483 ipa_dump_agg_replacement_values (dump_file, aggval);
5485 fbi.node = node;
5486 fbi.info = NULL;
5487 fbi.bb_infos = vNULL;
5488 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5489 fbi.param_count = param_count;
5490 fbi.aa_walked = 0;
5492 descriptors.safe_grow_cleared (param_count);
5493 ipa_populate_param_decls (node, descriptors);
5494 calculate_dominance_info (CDI_DOMINATORS);
5495 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5496 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5498 int i;
5499 struct ipa_bb_info *bi;
5500 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5501 free_ipa_bb_info (bi);
5502 fbi.bb_infos.release ();
5503 free_dominance_info (CDI_DOMINATORS);
5504 (*ipa_node_agg_replacements)[node->uid] = NULL;
5505 descriptors.release ();
5507 if (!something_changed)
5508 return 0;
5509 else if (cfg_changed)
5510 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5511 else
5512 return TODO_update_ssa_only_virtuals;