/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the argument infos of call sites are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the I-th formal parameter of the function corresponding to INFO into
   FILE.  Note there is no setter function as the descriptors array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}
/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
        ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
445 void
446 ipa_print_all_jump_functions (FILE *f)
448 struct cgraph_node *node;
450 fprintf (f, "\nJump functions:\n");
451 FOR_EACH_FUNCTION (node)
453 ipa_print_node_jump_functions (f, node);
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set DST to be a copy of the jump function SRC (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}
/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
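
/* As an illustration of the layout assumed above (a sketch, not code this
   pass handles directly): for  struct B : A  with virtual methods, the
   compiled B::B() first calls A::A() on the A sub-object, then stores the
   address of B's vtable into the object, and only then runs user-written
   code.  Walking statements backwards from a call site therefore only needs
   to recognize the vtable pointer stores of section 2.  */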
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */
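
/* For instance (an illustrative sketch, not a case from the testsuite): given
   int f (struct S *p)  and a load statement  tmp_1 = p_2(D)->fld;  this
   function would, provided the memory p points to is not modified beforehand,
   set *INDEX_P to the index of P, *OFFSET_P to the bit offset of FLD within
   struct S, and *BY_REF_P to true.  */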
static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         }
      */
      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

   foo (int a)
   {
     int a.0;

     a.0_2 = a;
     bar (a.0_2);
   }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

   foo (int a)
   {
     int D.2064;

     D.2064_4 = a.1(D) + 4;
     bar (D.2064_4);
   }

   This case can also occur in combination with the previous one, e.g.:

   foo (int a, int z)
   {
     int a.0;
     int D.2064;

     a.0_3 = a;
     D.2064_4 = a.0_3 + 4;
     foo (D.2064_4);
   }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

   B::foo() (struct B * const this)
   {
     struct A * D.1845;

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);
   }

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  FBI contains the information that is only
   needed for intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */
static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */
static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */
static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
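
/* For reference, the representation this matches is roughly the Itanium C++
   ABI layout of a pointer to member function (a sketch; the field names are
   illustrative only):

     struct {
       void (T::*method_ptr) ();  // pointer to the member function
       ptrdiff_t delta;           // adjustment applied to the this pointer
     };
*/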
static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before
   a call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
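
/* For example (an illustrative sketch): for a fragment like

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backward walk from the call would record the constants 1 and 2 at
   their respective offsets in the aggregate jump function built for &s.  */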
1698 static void
1699 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1700 struct ipa_jump_func *jfunc)
1702 struct ipa_known_agg_contents_list *list = NULL;
1703 int item_count = 0, const_count = 0;
1704 HOST_WIDE_INT arg_offset, arg_size;
1705 gimple_stmt_iterator gsi;
1706 tree arg_base;
1707 bool check_ref, by_ref;
1708 ao_ref r;
1710 /* The function operates in three stages. First, we prepare check_ref, r,
1711 arg_base and arg_offset based on what is actually passed as an actual
1712 argument. */
1714 if (POINTER_TYPE_P (arg_type))
1716 by_ref = true;
1717 if (TREE_CODE (arg) == SSA_NAME)
1719 tree type_size;
1720 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1721 return;
1722 check_ref = true;
1723 arg_base = arg;
1724 arg_offset = 0;
1725 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1726 arg_size = tree_to_uhwi (type_size);
1727 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1729 else if (TREE_CODE (arg) == ADDR_EXPR)
1731 HOST_WIDE_INT arg_max_size;
1733 arg = TREE_OPERAND (arg, 0);
1734 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1735 &arg_max_size);
1736 if (arg_max_size == -1
1737 || arg_max_size != arg_size
1738 || arg_offset < 0)
1739 return;
1740 if (DECL_P (arg_base))
1742 check_ref = false;
1743 ao_ref_init (&r, arg_base);
1745 else
1746 return;
1748 else
1749 return;
1751 else
1753 HOST_WIDE_INT arg_max_size;
1755 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1757 by_ref = false;
1758 check_ref = false;
1759 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1760 &arg_max_size);
1761 if (arg_max_size == -1
1762 || arg_max_size != arg_size
1763 || arg_offset < 0)
1764 return;
1766 ao_ref_init (&r, arg);
1769 /* Second stage walks back the BB, looks at individual statements and as long
1770 as it is confident of how the statements affect contents of the
1771 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1772 describing it. */
1773 gsi = gsi_for_stmt (call);
1774 gsi_prev (&gsi);
1775 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1777 struct ipa_known_agg_contents_list *n, **p;
1778 gimple stmt = gsi_stmt (gsi);
1779 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1780 tree lhs, rhs, lhs_base;
1782 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1783 continue;
1784 if (!gimple_assign_single_p (stmt))
1785 break;
1787 lhs = gimple_assign_lhs (stmt);
1788 rhs = gimple_assign_rhs1 (stmt);
1789 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1790 || TREE_CODE (lhs) == BIT_FIELD_REF
1791 || contains_bitfld_component_ref_p (lhs))
1792 break;
1794 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1795 &lhs_max_size);
1796 if (lhs_max_size == -1
1797 || lhs_max_size != lhs_size)
1798 break;
1800 if (check_ref)
1802 if (TREE_CODE (lhs_base) != MEM_REF
1803 || TREE_OPERAND (lhs_base, 0) != arg_base
1804 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1805 break;
1807 else if (lhs_base != arg_base)
1809 if (DECL_P (lhs_base))
1810 continue;
1811 else
1812 break;
1815 bool already_there = false;
1816 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1817 &already_there);
1818 if (!p)
1819 break;
1820 if (already_there)
1821 continue;
1823 rhs = get_ssa_def_if_simple_copy (rhs);
1824 n = XALLOCA (struct ipa_known_agg_contents_list);
1825 n->size = lhs_size;
1826 n->offset = lhs_offset;
1827 if (is_gimple_ip_invariant (rhs))
1829 n->constant = rhs;
1830 const_count++;
1832 else
1833 n->constant = NULL_TREE;
1834 n->next = *p;
1835 *p = n;
1837 item_count++;
1838 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1839 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1840 break;
1843 /* The third stage just goes over the list and creates an appropriate vector
1844 of ipa_agg_jf_item structures out of it, of course only if there are
1845 any known constants to begin with. */
1847 if (const_count)
1849 jfunc->agg.by_ref = by_ref;
1850 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
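/* Return the declared type of the I-th formal parameter of the function
   corresponding to call graph edge E. The function type is examined first
   and, for a known callee, the parameter declarations are used as a
   fallback. Return NULL if the type cannot be determined.  */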
1854 static tree
1855 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1857 int n;
1858 tree type = (e->callee
1859 ? TREE_TYPE (e->callee->decl)
1860 : gimple_call_fntype (e->call_stmt));
1861 tree t = TYPE_ARG_TYPES (type);
1863 for (n = 0; n < i; n++)
1865 if (!t)
1866 break;
1867 t = TREE_CHAIN (t);
1869 if (t)
1870 return TREE_VALUE (t);
1871 if (!e->callee)
1872 return NULL;
1873 t = DECL_ARGUMENTS (e->callee->decl);
1874 for (n = 0; n < i; n++)
1876 if (!t)
1877 return NULL;
1878 t = TREE_CHAIN (t);
1880 if (t)
1881 return TREE_TYPE (t);
1882 return NULL;
1885 /* Compute jump function for all arguments of callsite CS and insert the
1886 information in the jump_functions array in the ipa_edge_args corresponding
1887 to this callsite. */
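/* As a hypothetical example, in a caller such as

     extern int bar (int, int, int);

     int
     foo (int x)
     {
       return bar (x, x + 2, 7);
     }

   the first argument of bar gets a simple pass-through jump function, the
   second an arithmetic pass-through with operation PLUS_EXPR and operand 2,
   and the third a constant jump function holding 7.  */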
1889 static void
1890 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1891 struct cgraph_edge *cs)
1893 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1894 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1895 gimple call = cs->call_stmt;
1896 int n, arg_num = gimple_call_num_args (call);
1897 bool useful_context = false;
1899 if (arg_num == 0 || args->jump_functions)
1900 return;
1901 vec_safe_grow_cleared (args->jump_functions, arg_num);
1902 if (flag_devirtualize)
1903 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1905 if (gimple_call_internal_p (call))
1906 return;
1907 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1908 return;
1910 for (n = 0; n < arg_num; n++)
1912 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1913 tree arg = gimple_call_arg (call, n);
1914 tree param_type = ipa_get_callee_param_type (cs, n);
1915 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1917 tree instance;
1918 struct ipa_polymorphic_call_context context (cs->caller->decl,
1919 arg, cs->call_stmt,
1920 &instance);
1921 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1922 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1923 if (!context.useless_p ())
1924 useful_context = true;
1927 if (is_gimple_ip_invariant (arg))
1928 ipa_set_jf_constant (jfunc, arg, cs);
1929 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1930 && TREE_CODE (arg) == PARM_DECL)
1932 int index = ipa_get_param_decl_index (info, arg);
1934 gcc_assert (index >= 0);
1935 /* An aggregate passed by value; check for pass-through, otherwise we
1936 will attempt to fill in the aggregate contents later in this
1937 loop. */
1938 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1940 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1941 continue;
1944 else if (TREE_CODE (arg) == SSA_NAME)
1946 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1948 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1949 if (index >= 0)
1951 bool agg_p, type_p;
1952 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1953 if (param_type && POINTER_TYPE_P (param_type))
1954 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1955 call, jfunc);
1956 else
1957 type_p = false;
1958 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1959 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1960 type_p);
1963 else
1965 gimple stmt = SSA_NAME_DEF_STMT (arg);
1966 if (is_gimple_assign (stmt))
1967 compute_complex_assign_jump_func (fbi, info, jfunc,
1968 call, stmt, arg, param_type);
1969 else if (gimple_code (stmt) == GIMPLE_PHI)
1970 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1971 call, stmt, param_type);
1974 else
1975 compute_known_type_jump_func (arg, jfunc, call,
1976 param_type
1977 && POINTER_TYPE_P (param_type)
1978 ? TREE_TYPE (param_type)
1979 : NULL);
1981 /* If ARG is a pointer, we cannot use its type to determine the type of the
1982 aggregate passed (because type conversions are ignored in gimple). Usually
1983 we can safely get the type from the function declaration, but in the case
1984 of K&R prototypes or variadic functions we can try our luck with the type
1985 of the pointer passed. TODO: Since we look for the actual initialization
1986 of the memory object, we might do better deriving the type from the memory stores we find. */
1987 if (!param_type)
1988 param_type = TREE_TYPE (arg);
1990 if ((jfunc->type != IPA_JF_PASS_THROUGH
1991 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1992 && (jfunc->type != IPA_JF_ANCESTOR
1993 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1994 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1995 || POINTER_TYPE_P (param_type)))
1996 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1998 if (!useful_context)
1999 vec_free (args->polymorphic_call_contexts);
2002 /* Compute jump functions for all edges - both direct and indirect - outgoing
2003 from BB. */
2005 static void
2006 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
2008 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2009 int i;
2010 struct cgraph_edge *cs;
2012 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2014 struct cgraph_node *callee = cs->callee;
2016 if (callee)
2018 callee->ultimate_alias_target ();
2019 /* We do not need to bother analyzing calls to unknown functions
2020 unless they may become known during lto/whopr. */
2021 if (!callee->definition && !flag_lto)
2022 continue;
2024 ipa_compute_jump_functions_for_edge (fbi, cs);
2028 /* If STMT looks like a statement loading a value from a member pointer formal
2029 parameter, return that parameter and store the offset of the field to
2030 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2031 might be clobbered). If USE_DELTA, then we look for a use of the delta
2032 field rather than the pfn. */
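/* Assuming the usual member pointer layout used in the example below, a
   typical statement matched here is

     f$__pfn_24 = f.__pfn;

   or, once the aggregate has been lowered,

     f$__pfn_24 = MEM[(struct  *)&f + 4B];

   where f is a PARM_DECL of a member pointer type.  */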
2034 static tree
2035 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2036 HOST_WIDE_INT *offset_p)
2038 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2040 if (!gimple_assign_single_p (stmt))
2041 return NULL_TREE;
2043 rhs = gimple_assign_rhs1 (stmt);
2044 if (TREE_CODE (rhs) == COMPONENT_REF)
2046 ref_field = TREE_OPERAND (rhs, 1);
2047 rhs = TREE_OPERAND (rhs, 0);
2049 else
2050 ref_field = NULL_TREE;
2051 if (TREE_CODE (rhs) != MEM_REF)
2052 return NULL_TREE;
2053 rec = TREE_OPERAND (rhs, 0);
2054 if (TREE_CODE (rec) != ADDR_EXPR)
2055 return NULL_TREE;
2056 rec = TREE_OPERAND (rec, 0);
2057 if (TREE_CODE (rec) != PARM_DECL
2058 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2059 return NULL_TREE;
2060 ref_offset = TREE_OPERAND (rhs, 1);
2062 if (use_delta)
2063 fld = delta_field;
2064 else
2065 fld = ptr_field;
2066 if (offset_p)
2067 *offset_p = int_bit_position (fld);
2069 if (ref_field)
2071 if (integer_nonzerop (ref_offset))
2072 return NULL_TREE;
2073 return ref_field == fld ? rec : NULL_TREE;
2075 else
2076 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2077 : NULL_TREE;
2080 /* Returns true iff T is an SSA_NAME defined by a statement. */
2082 static bool
2083 ipa_is_ssa_with_stmt_def (tree t)
2085 if (TREE_CODE (t) == SSA_NAME
2086 && !SSA_NAME_IS_DEFAULT_DEF (t))
2087 return true;
2088 else
2089 return false;
2092 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2093 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2094 indirect call graph edge. */
2096 static struct cgraph_edge *
2097 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2099 struct cgraph_edge *cs;
2101 cs = node->get_edge (stmt);
2102 cs->indirect_info->param_index = param_index;
2103 cs->indirect_info->agg_contents = 0;
2104 cs->indirect_info->member_ptr = 0;
2105 return cs;
2108 /* Analyze the CALL and examine uses of formal parameters of the caller
2109 FBI->node (described by FBI->info). Currently it checks
2111 whether the call calls a pointer that is a formal parameter and if so, the
2112 parameter is marked with the called flag and an indirect call graph edge
2113 describing the call is created. This is very simple for ordinary pointers
2114 represented in SSA but not-so-nice when it comes to member pointers. The
2115 ugly part of this function does nothing more than trying to match the
2116 pattern of such a call. An example of such a pattern is the gimple dump
2117 below, the call is on the last line:
2119 <bb 2>:
2120 f$__delta_5 = f.__delta;
2121 f$__pfn_24 = f.__pfn;
2123 or
2124 <bb 2>:
2125 f$__delta_5 = MEM[(struct *)&f];
2126 f$__pfn_24 = MEM[(struct *)&f + 4B];
2128 and a few lines below:
2130 <bb 5>:
2131 D.2496_3 = (int) f$__pfn_24;
2132 D.2497_4 = D.2496_3 & 1;
2133 if (D.2497_4 != 0)
2134 goto <bb 3>;
2135 else
2136 goto <bb 4>;
2138 <bb 6>:
2139 D.2500_7 = (unsigned int) f$__delta_5;
2140 D.2501_8 = &S + D.2500_7;
2141 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2142 D.2503_10 = *D.2502_9;
2143 D.2504_12 = f$__pfn_24 + -1;
2144 D.2505_13 = (unsigned int) D.2504_12;
2145 D.2506_14 = D.2503_10 + D.2505_13;
2146 D.2507_15 = *D.2506_14;
2147 iftmp.11_16 = (String:: *) D.2507_15;
2149 <bb 7>:
2150 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2151 D.2500_19 = (unsigned int) f$__delta_5;
2152 D.2508_20 = &S + D.2500_19;
2153 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2155 Such patterns are results of simple calls to a member pointer:
2157 int doprinting (int (MyString::* f)(int) const)
2159 MyString S ("somestring");
2161 return (S.*f)(4);
2164 Moreover, the function also looks for called pointers loaded from aggregates
2165 passed by value or reference. */
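/* A simpler case also handled here is a called pointer loaded from an
   aggregate parameter, as in the hypothetical

     struct S { void (*callback) (void); };

     void
     invoke (struct S *s)
     {
       s->callback ();
     }

   where the call target is loaded from the aggregate that s points to.  */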
2167 static void
2168 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2169 tree target)
2171 struct ipa_node_params *info = fbi->info;
2172 HOST_WIDE_INT offset;
2173 bool by_ref;
2175 if (SSA_NAME_IS_DEFAULT_DEF (target))
2177 tree var = SSA_NAME_VAR (target);
2178 int index = ipa_get_param_decl_index (info, var);
2179 if (index >= 0)
2180 ipa_note_param_call (fbi->node, index, call);
2181 return;
2184 int index;
2185 gimple def = SSA_NAME_DEF_STMT (target);
2186 if (gimple_assign_single_p (def)
2187 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2188 gimple_assign_rhs1 (def), &index, &offset,
2189 NULL, &by_ref))
2191 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2192 cs->indirect_info->offset = offset;
2193 cs->indirect_info->agg_contents = 1;
2194 cs->indirect_info->by_ref = by_ref;
2195 return;
2198 /* Now we need to try to match the complex pattern of calling a member
2199 pointer. */
2200 if (gimple_code (def) != GIMPLE_PHI
2201 || gimple_phi_num_args (def) != 2
2202 || !POINTER_TYPE_P (TREE_TYPE (target))
2203 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2204 return;
2206 /* First, we need to check whether one of these is a load from a member
2207 pointer that is a parameter to this function. */
2208 tree n1 = PHI_ARG_DEF (def, 0);
2209 tree n2 = PHI_ARG_DEF (def, 1);
2210 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2211 return;
2212 gimple d1 = SSA_NAME_DEF_STMT (n1);
2213 gimple d2 = SSA_NAME_DEF_STMT (n2);
2215 tree rec;
2216 basic_block bb, virt_bb;
2217 basic_block join = gimple_bb (def);
2218 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2220 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2221 return;
2223 bb = EDGE_PRED (join, 0)->src;
2224 virt_bb = gimple_bb (d2);
2226 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2228 bb = EDGE_PRED (join, 1)->src;
2229 virt_bb = gimple_bb (d1);
2231 else
2232 return;
2234 /* Second, we need to check that the basic blocks are laid out in the way
2235 corresponding to the pattern. */
2237 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2238 || single_pred (virt_bb) != bb
2239 || single_succ (virt_bb) != join)
2240 return;
2242 /* Third, let's see that the branching is done depending on the least
2243 significant bit of the pfn. */
2245 gimple branch = last_stmt (bb);
2246 if (!branch || gimple_code (branch) != GIMPLE_COND)
2247 return;
2249 if ((gimple_cond_code (branch) != NE_EXPR
2250 && gimple_cond_code (branch) != EQ_EXPR)
2251 || !integer_zerop (gimple_cond_rhs (branch)))
2252 return;
2254 tree cond = gimple_cond_lhs (branch);
2255 if (!ipa_is_ssa_with_stmt_def (cond))
2256 return;
2258 def = SSA_NAME_DEF_STMT (cond);
2259 if (!is_gimple_assign (def)
2260 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2261 || !integer_onep (gimple_assign_rhs2 (def)))
2262 return;
2264 cond = gimple_assign_rhs1 (def);
2265 if (!ipa_is_ssa_with_stmt_def (cond))
2266 return;
2268 def = SSA_NAME_DEF_STMT (cond);
2270 if (is_gimple_assign (def)
2271 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2273 cond = gimple_assign_rhs1 (def);
2274 if (!ipa_is_ssa_with_stmt_def (cond))
2275 return;
2276 def = SSA_NAME_DEF_STMT (cond);
2279 tree rec2;
2280 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2281 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2282 == ptrmemfunc_vbit_in_delta),
2283 NULL);
2284 if (rec != rec2)
2285 return;
2287 index = ipa_get_param_decl_index (info, rec);
2288 if (index >= 0
2289 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2291 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2292 cs->indirect_info->offset = offset;
2293 cs->indirect_info->agg_contents = 1;
2294 cs->indirect_info->member_ptr = 1;
2297 return;
2300 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2301 object referenced in the expression is a formal parameter of the caller
2302 FBI->node (described by FBI->info), create a call note for the
2303 statement. */
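/* For instance, in a hypothetical C++ caller

     void
     dispatch (A *a)
     {
       a->foo ();
     }

   where foo is a virtual method of A, the OBJ_TYPE_REF in the call refers
   to the formal parameter a, so an indirect call note with the
   corresponding OTR token and ancestor offset is created for the edge.  */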
2305 static void
2306 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2307 gimple call, tree target)
2309 tree obj = OBJ_TYPE_REF_OBJECT (target);
2310 int index;
2311 HOST_WIDE_INT anc_offset;
2313 if (!flag_devirtualize)
2314 return;
2316 if (TREE_CODE (obj) != SSA_NAME)
2317 return;
2319 struct ipa_node_params *info = fbi->info;
2320 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2322 struct ipa_jump_func jfunc;
2323 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2324 return;
2326 anc_offset = 0;
2327 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2328 gcc_assert (index >= 0);
2329 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2330 call, &jfunc))
2331 return;
2333 else
2335 struct ipa_jump_func jfunc;
2336 gimple stmt = SSA_NAME_DEF_STMT (obj);
2337 tree expr;
2339 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2340 if (!expr)
2341 return;
2342 index = ipa_get_param_decl_index (info,
2343 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2344 gcc_assert (index >= 0);
2345 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2346 call, &jfunc, anc_offset))
2347 return;
2350 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2351 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2352 ii->offset = anc_offset;
2353 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2354 ii->otr_type = obj_type_ref_class (target);
2355 ii->polymorphic = 1;
2358 /* Analyze call statement CALL to determine whether and how it utilizes
2359 formal parameters of the caller (described by FBI->info). */
2362 static void
2363 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2365 tree target = gimple_call_fn (call);
2367 if (!target
2368 || (TREE_CODE (target) != SSA_NAME
2369 && !virtual_method_call_p (target)))
2370 return;
2372 struct cgraph_edge *cs = fbi->node->get_edge (call);
2373 /* If we previously turned the call into a direct call, there is
2374 no need to analyze. */
2375 if (cs && !cs->indirect_unknown_callee)
2376 return;
2378 if (cs->indirect_info->polymorphic)
2380 tree instance;
2381 tree target = gimple_call_fn (call);
2382 ipa_polymorphic_call_context context (current_function_decl,
2383 target, call, &instance);
2385 gcc_checking_assert (cs->indirect_info->otr_type
2386 == obj_type_ref_class (target));
2387 gcc_checking_assert (cs->indirect_info->otr_token
2388 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2390 cs->indirect_info->vptr_changed
2391 = !context.get_dynamic_type (instance,
2392 OBJ_TYPE_REF_OBJECT (target),
2393 obj_type_ref_class (target), call);
2394 cs->indirect_info->context = context;
2397 if (TREE_CODE (target) == SSA_NAME)
2398 ipa_analyze_indirect_call_uses (fbi, call, target);
2399 else if (virtual_method_call_p (target))
2400 ipa_analyze_virtual_call_uses (fbi, call, target);
2404 /* Analyze the call statement STMT with respect to formal parameters (described
2405 in FBI->info) of the caller given by FBI->node. Currently it only checks whether
2406 formal parameters are called. */
2408 static void
2409 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2411 if (is_gimple_call (stmt))
2412 ipa_analyze_call_uses (fbi, stmt);
2415 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2416 If OP is a parameter declaration, mark it as used in the info structure
2417 passed in DATA. */
2419 static bool
2420 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2422 struct ipa_node_params *info = (struct ipa_node_params *) data;
2424 op = get_base_address (op);
2425 if (op
2426 && TREE_CODE (op) == PARM_DECL)
2428 int index = ipa_get_param_decl_index (info, op);
2429 gcc_assert (index >= 0);
2430 ipa_set_param_used (info, index, true);
2433 return false;
2436 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2437 the findings in various structures of the associated ipa_node_params
2438 structure, such as parameter flags, notes etc. FBI holds various data about
2439 the function being analyzed. */
2441 static void
2442 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2444 gimple_stmt_iterator gsi;
2445 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2447 gimple stmt = gsi_stmt (gsi);
2449 if (is_gimple_debug (stmt))
2450 continue;
2452 ipa_analyze_stmt_uses (fbi, stmt);
2453 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2454 visit_ref_for_mod_analysis,
2455 visit_ref_for_mod_analysis,
2456 visit_ref_for_mod_analysis);
2458 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2459 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2460 visit_ref_for_mod_analysis,
2461 visit_ref_for_mod_analysis,
2462 visit_ref_for_mod_analysis);
2465 /* Calculate controlled uses of parameters of NODE. */
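/* For instance, in the hypothetical

     static void
     wrapper (void (*fn) (int))
     {
       fn (0);
       fn (1);
     }

   the parameter fn has two controlled uses, because its default definition
   is used only in call statements; any use in a non-call statement, such as
   storing fn to memory, would force IPA_UNDESCRIBED_USE instead.  */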
2467 static void
2468 ipa_analyze_controlled_uses (struct cgraph_node *node)
2470 struct ipa_node_params *info = IPA_NODE_REF (node);
2472 for (int i = 0; i < ipa_get_param_count (info); i++)
2474 tree parm = ipa_get_param (info, i);
2475 int controlled_uses = 0;
2477 /* For SSA regs see if parameter is used. For non-SSA we compute
2478 the flag during modification analysis. */
2479 if (is_gimple_reg (parm))
2481 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2482 parm);
2483 if (ddef && !has_zero_uses (ddef))
2485 imm_use_iterator imm_iter;
2486 use_operand_p use_p;
2488 ipa_set_param_used (info, i, true);
2489 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2490 if (!is_gimple_call (USE_STMT (use_p)))
2492 if (!is_gimple_debug (USE_STMT (use_p)))
2494 controlled_uses = IPA_UNDESCRIBED_USE;
2495 break;
2498 else
2499 controlled_uses++;
2501 else
2502 controlled_uses = 0;
2504 else
2505 controlled_uses = IPA_UNDESCRIBED_USE;
2506 ipa_set_controlled_uses (info, i, controlled_uses);
2510 /* Free stuff in BI. */
2512 static void
2513 free_ipa_bb_info (struct ipa_bb_info *bi)
2515 bi->cg_edges.release ();
2516 bi->param_aa_statuses.release ();
2519 /* Dominator walker driving the analysis. */
2521 class analysis_dom_walker : public dom_walker
2523 public:
2524 analysis_dom_walker (struct func_body_info *fbi)
2525 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2527 virtual void before_dom_children (basic_block);
2529 private:
2530 struct func_body_info *m_fbi;
2533 void
2534 analysis_dom_walker::before_dom_children (basic_block bb)
2536 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2537 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2540 /* Initialize the array describing properties of formal parameters
2541 of NODE, analyze their uses and compute jump functions associated
2542 with actual arguments of calls from within NODE. */
2544 void
2545 ipa_analyze_node (struct cgraph_node *node)
2547 struct func_body_info fbi;
2548 struct ipa_node_params *info;
2550 ipa_check_create_node_params ();
2551 ipa_check_create_edge_args ();
2552 info = IPA_NODE_REF (node);
2554 if (info->analysis_done)
2555 return;
2556 info->analysis_done = 1;
2558 if (ipa_func_spec_opts_forbid_analysis_p (node))
2560 for (int i = 0; i < ipa_get_param_count (info); i++)
2562 ipa_set_param_used (info, i, true);
2563 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2565 return;
2568 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2569 push_cfun (func);
2570 calculate_dominance_info (CDI_DOMINATORS);
2571 ipa_initialize_node_params (node);
2572 ipa_analyze_controlled_uses (node);
2574 fbi.node = node;
2575 fbi.info = IPA_NODE_REF (node);
2576 fbi.bb_infos = vNULL;
2577 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2578 fbi.param_count = ipa_get_param_count (info);
2579 fbi.aa_walked = 0;
2581 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2583 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2584 bi->cg_edges.safe_push (cs);
2587 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2589 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2590 bi->cg_edges.safe_push (cs);
2593 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2595 int i;
2596 struct ipa_bb_info *bi;
2597 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2598 free_ipa_bb_info (bi);
2599 fbi.bb_infos.release ();
2600 free_dominance_info (CDI_DOMINATORS);
2601 pop_cfun ();
2604 /* Update the jump function DST when the call graph edge corresponding to SRC
2605 is being inlined, knowing that DST is of type ancestor and SRC of known
2606 type. */
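/* E.g. combining a known type jump function at offset 32 with an ancestor
   jump function at offset 64 that preserves the type yields a known type
   jump function at offset 96 with the same base type.  */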
2608 static void
2609 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2610 struct ipa_jump_func *dst)
2612 HOST_WIDE_INT combined_offset;
2613 tree combined_type;
2615 if (!ipa_get_jf_ancestor_type_preserved (dst))
2617 dst->type = IPA_JF_UNKNOWN;
2618 return;
2621 combined_offset = ipa_get_jf_known_type_offset (src)
2622 + ipa_get_jf_ancestor_offset (dst);
2623 combined_type = ipa_get_jf_ancestor_type (dst);
2625 ipa_set_jf_known_type (dst, combined_offset,
2626 ipa_get_jf_known_type_base_type (src),
2627 combined_type);
2630 /* Update the jump functions associated with call graph edge E when the call
2631 graph edge CS is being inlined, assuming that E->caller is already (possibly
2632 indirectly) inlined into CS->callee and that E has not been inlined. */
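/* E.g. a pass-through jump function on an edge E that refers to formal
   parameter 2 of the inlined callee is replaced by whatever CS passes in
   that position: composed with a constant it becomes that constant,
   composed with another simple pass-through it becomes a pass-through
   referring to the corresponding formal parameter of CS->caller.  */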
2634 static void
2635 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2636 struct cgraph_edge *e)
2638 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2639 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2640 int count = ipa_get_cs_argument_count (args);
2641 int i;
2643 for (i = 0; i < count; i++)
2645 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2646 struct ipa_polymorphic_call_context *dst_ctx
2647 = ipa_get_ith_polymorhic_call_context (args, i);
2649 if (dst->type == IPA_JF_ANCESTOR)
2651 struct ipa_jump_func *src;
2652 int dst_fid = dst->value.ancestor.formal_id;
2653 struct ipa_polymorphic_call_context *src_ctx
2654 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2656 /* Variable number of arguments can cause havoc if we try to access
2657 one that does not exist in the inlined edge. So make sure we
2658 don't. */
2659 if (dst_fid >= ipa_get_cs_argument_count (top))
2661 dst->type = IPA_JF_UNKNOWN;
2662 continue;
2665 src = ipa_get_ith_jump_func (top, dst_fid);
2667 if (src_ctx && !src_ctx->useless_p ())
2669 struct ipa_polymorphic_call_context ctx = *src_ctx;
2671 /* TODO: Make type preserved safe WRT contexts. */
2672 if (!dst->value.ancestor.agg_preserved)
2673 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2674 ctx.offset_by (dst->value.ancestor.offset);
2675 if (!ctx.useless_p ())
2677 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2678 count);
2679 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2681 dst_ctx->combine_with (ctx);
2683 if (src->agg.items
2684 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2686 struct ipa_agg_jf_item *item;
2687 int j;
2689 /* Currently we do not produce clobber aggregate jump functions,
2690 replace with merging when we do. */
2691 gcc_assert (!dst->agg.items);
2693 dst->agg.items = vec_safe_copy (src->agg.items);
2694 dst->agg.by_ref = src->agg.by_ref;
2695 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2696 item->offset -= dst->value.ancestor.offset;
2699 if (src->type == IPA_JF_KNOWN_TYPE)
2700 combine_known_type_and_ancestor_jfs (src, dst);
2701 else if (src->type == IPA_JF_PASS_THROUGH
2702 && src->value.pass_through.operation == NOP_EXPR)
2704 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2705 dst->value.ancestor.agg_preserved &=
2706 src->value.pass_through.agg_preserved;
2707 dst->value.ancestor.type_preserved &=
2708 src->value.pass_through.type_preserved;
2710 else if (src->type == IPA_JF_ANCESTOR)
2712 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2713 dst->value.ancestor.offset += src->value.ancestor.offset;
2714 dst->value.ancestor.agg_preserved &=
2715 src->value.ancestor.agg_preserved;
2716 dst->value.ancestor.type_preserved &=
2717 src->value.ancestor.type_preserved;
2719 else
2720 dst->type = IPA_JF_UNKNOWN;
2722 else if (dst->type == IPA_JF_PASS_THROUGH)
2724 struct ipa_jump_func *src;
2725 /* We must check range due to calls with variable number of arguments
2726 and we cannot combine jump functions with operations. */
2727 if (dst->value.pass_through.operation == NOP_EXPR
2728 && (dst->value.pass_through.formal_id
2729 < ipa_get_cs_argument_count (top)))
2731 int dst_fid = dst->value.pass_through.formal_id;
2732 src = ipa_get_ith_jump_func (top, dst_fid);
2733 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2734 struct ipa_polymorphic_call_context *src_ctx
2735 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2737 if (src_ctx && !src_ctx->useless_p ())
2739 struct ipa_polymorphic_call_context ctx = *src_ctx;
2741 /* TODO: Make type preserved safe WRT contexts. */
2742 if (!dst->value.ancestor.agg_preserved)
2743 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2744 if (!ctx.useless_p ())
2746 if (!dst_ctx)
2748 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2749 count);
2750 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2752 dst_ctx->combine_with (ctx);
2755 switch (src->type)
2757 case IPA_JF_UNKNOWN:
2758 dst->type = IPA_JF_UNKNOWN;
2759 break;
2760 case IPA_JF_KNOWN_TYPE:
2761 if (ipa_get_jf_pass_through_type_preserved (dst))
2762 ipa_set_jf_known_type (dst,
2763 ipa_get_jf_known_type_offset (src),
2764 ipa_get_jf_known_type_base_type (src),
2765 ipa_get_jf_known_type_component_type (src));
2766 else
2767 dst->type = IPA_JF_UNKNOWN;
2768 break;
2769 case IPA_JF_CONST:
2770 ipa_set_jf_cst_copy (dst, src);
2771 break;
2773 case IPA_JF_PASS_THROUGH:
2775 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2776 enum tree_code operation;
2777 operation = ipa_get_jf_pass_through_operation (src);
2779 if (operation == NOP_EXPR)
2781 bool agg_p, type_p;
2782 agg_p = dst_agg_p
2783 && ipa_get_jf_pass_through_agg_preserved (src);
2784 type_p = ipa_get_jf_pass_through_type_preserved (src)
2785 && ipa_get_jf_pass_through_type_preserved (dst);
2786 ipa_set_jf_simple_pass_through (dst, formal_id,
2787 agg_p, type_p);
2789 else
2791 tree operand = ipa_get_jf_pass_through_operand (src);
2792 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2793 operation);
2795 break;
2797 case IPA_JF_ANCESTOR:
2799 bool agg_p, type_p;
2800 agg_p = dst_agg_p
2801 && ipa_get_jf_ancestor_agg_preserved (src);
2802 type_p = ipa_get_jf_ancestor_type_preserved (src)
2803 && ipa_get_jf_pass_through_type_preserved (dst);
2804 ipa_set_ancestor_jf (dst,
2805 ipa_get_jf_ancestor_offset (src),
2806 ipa_get_jf_ancestor_type (src),
2807 ipa_get_jf_ancestor_formal_id (src),
2808 agg_p, type_p);
2809 break;
2811 default:
2812 gcc_unreachable ();
2815 if (src->agg.items
2816 && (dst_agg_p || !src->agg.by_ref))
2818 /* Currently we do not produce clobber aggregate jump
2819 functions, replace with merging when we do. */
2820 gcc_assert (!dst->agg.items);
2822 dst->agg.by_ref = src->agg.by_ref;
2823 dst->agg.items = vec_safe_copy (src->agg.items);
2826 else
2827 dst->type = IPA_JF_UNKNOWN;
2832 /* If TARGET is an addr_expr of a function declaration, make it the
2833 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2834 Otherwise, return NULL. */
2836 struct cgraph_edge *
2837 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2838 bool speculative)
2840 struct cgraph_node *callee;
2841 struct inline_edge_summary *es = inline_edge_summary (ie);
2842 bool unreachable = false;
2844 if (TREE_CODE (target) == ADDR_EXPR)
2845 target = TREE_OPERAND (target, 0);
2846 if (TREE_CODE (target) != FUNCTION_DECL)
2848 target = canonicalize_constructor_val (target, NULL);
2849 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2851 if (ie->indirect_info->member_ptr)
2852 /* Member pointer call that goes through a VMT lookup. */
2853 return NULL;
2855 if (dump_enabled_p ())
2857 location_t loc = gimple_location_safe (ie->call_stmt);
2858 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2859 "discovered direct call to non-function in %s/%i, "
2860 "making it __builtin_unreachable\n",
2861 ie->caller->name (), ie->caller->order);
2864 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2865 callee = cgraph_node::get_create (target);
2866 unreachable = true;
2868 else
2869 callee = cgraph_node::get (target);
2871 else
2872 callee = cgraph_node::get (target);
2874 /* Because may-edges are not explicitly represented and the vtable may be
2875 external, we may create the first reference to the object in the unit. */
2876 if (!callee || callee->global.inlined_to)
2879 /* We had better make sure we can refer to it.
2880 In the case of static functions we are out of luck, since we have already
2881 removed the body. In the case of public functions we may or may
2882 not introduce the reference. */
2883 if (!canonicalize_constructor_val (target, NULL)
2884 || !TREE_PUBLIC (target))
2886 if (dump_file)
2887 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2888 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2889 xstrdup (ie->caller->name ()),
2890 ie->caller->order,
2891 xstrdup (ie->callee->name ()),
2892 ie->callee->order);
2893 return NULL;
2895 callee = cgraph_node::get_create (target);
2898 /* If the edge is already speculated. */
2899 if (speculative && ie->speculative)
2901 struct cgraph_edge *e2;
2902 struct ipa_ref *ref;
2903 ie->speculative_call_info (e2, ie, ref);
2904 if (e2->callee->ultimate_alias_target ()
2905 != callee->ultimate_alias_target ())
2907 if (dump_file)
2908 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2909 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2910 xstrdup (ie->caller->name ()),
2911 ie->caller->order,
2912 xstrdup (callee->name ()),
2913 callee->order,
2914 xstrdup (e2->callee->name ()),
2915 e2->callee->order);
2917 else
2919 if (dump_file)
2920 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2921 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2922 xstrdup (ie->caller->name ()),
2923 ie->caller->order,
2924 xstrdup (callee->name ()),
2925 callee->order);
2927 return NULL;
2930 if (!dbg_cnt (devirt))
2931 return NULL;
2933 ipa_check_create_node_params ();
2935 /* We cannot make edges to inline clones. It is a bug if someone removed
2936 the cgraph node too early. */
2937 gcc_assert (!callee->global.inlined_to);
2939 if (dump_file && !unreachable)
2941 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2942 "(%s/%i -> %s/%i), for stmt ",
2943 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2944 speculative ? "speculative" : "known",
2945 xstrdup (ie->caller->name ()),
2946 ie->caller->order,
2947 xstrdup (callee->name ()),
2948 callee->order);
2949 if (ie->call_stmt)
2950 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2951 else
2952 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2954 if (dump_enabled_p ())
2956 location_t loc = gimple_location_safe (ie->call_stmt);
2958 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2959 "converting indirect call in %s to direct call to %s\n",
2960 ie->caller->name (), callee->name ());
2962 if (!speculative)
2963 ie = ie->make_direct (callee);
2964 else
2966 if (!callee->can_be_discarded_p ())
2968 cgraph_node *alias;
2969 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2970 if (alias)
2971 callee = alias;
2973 ie = ie->make_speculative
2974 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2976 es = inline_edge_summary (ie);
2977 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2978 - eni_size_weights.call_cost);
2979 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2980 - eni_time_weights.call_cost);
2982 return ie;
2985 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET
2986 or return NULL if there is none. BY_REF specifies whether the value has to
2987 be passed by reference or by value. */
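/* E.g. if AGG records the constant 7 at offset 64 for an aggregate passed
   by value, a query with OFFSET 64 and BY_REF false returns 7, whereas any
   mismatch in OFFSET or BY_REF yields NULL.  */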
2989 tree
2990 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2991 HOST_WIDE_INT offset, bool by_ref)
2993 struct ipa_agg_jf_item *item;
2994 int i;
2996 if (by_ref != agg->by_ref)
2997 return NULL;
2999 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3000 if (item->offset == offset)
3002 /* Currently we do not have clobber values, return NULL for them once
3003 we do. */
3004 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3005 return item->value;
3007 return NULL;
3010 /* Remove a reference to SYMBOL from the list of references of a node given by
3011 reference description RDESC. Return true if the reference has been
3012 successfully found and removed. */
3014 static bool
3015 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3017 struct ipa_ref *to_del;
3018 struct cgraph_edge *origin;
3020 origin = rdesc->cs;
3021 if (!origin)
3022 return false;
3023 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3024 origin->lto_stmt_uid);
3025 if (!to_del)
3026 return false;
3028 to_del->remove_reference ();
3029 if (dump_file)
3030 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
3031 xstrdup (origin->caller->name ()),
3032 origin->caller->order, xstrdup (symbol->name ()));
3033 return true;
3036 /* If JFUNC has a reference description with refcount different from
3037 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3038 NULL. JFUNC must be a constant jump function. */
3040 static struct ipa_cst_ref_desc *
3041 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3043 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3044 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3045 return rdesc;
3046 else
3047 return NULL;
3050 /* If the value of constant jump function JFUNC is an address of a function
3051 declaration, return the associated call graph node. Otherwise return
3052 NULL. */
3054 static cgraph_node *
3055 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3057 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3058 tree cst = ipa_get_jf_constant (jfunc);
3059 if (TREE_CODE (cst) != ADDR_EXPR
3060 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3061 return NULL;
3063 return cgraph_node::get (TREE_OPERAND (cst, 0));
3067 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3068 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3069 the edge specified in the rdesc. Return false if either the symbol or the
3070 reference could not be found, otherwise return true. */
3072 static bool
3073 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3075 struct ipa_cst_ref_desc *rdesc;
3076 if (jfunc->type == IPA_JF_CONST
3077 && (rdesc = jfunc_rdesc_usable (jfunc))
3078 && --rdesc->refcount == 0)
3080 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3081 if (!symbol)
3082 return false;
3084 return remove_described_reference (symbol, rdesc);
3086 return true;
3089 /* Try to find a destination for indirect edge IE that corresponds to a simple
3090 call or a call of a member function pointer and where the destination is a
3091 pointer formal parameter described by jump function JFUNC. If it can be
3092 determined, return the newly direct edge, otherwise return NULL.
3093 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3095 static struct cgraph_edge *
3096 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3097 struct ipa_jump_func *jfunc,
3098 struct ipa_node_params *new_root_info)
3100 struct cgraph_edge *cs;
3101 tree target;
3102 bool agg_contents = ie->indirect_info->agg_contents;
3104 if (ie->indirect_info->agg_contents)
3105 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3106 ie->indirect_info->offset,
3107 ie->indirect_info->by_ref);
3108 else
3109 target = ipa_value_from_jfunc (new_root_info, jfunc);
3110 if (!target)
3111 return NULL;
3112 cs = ipa_make_edge_direct_to_target (ie, target);
3114 if (cs && !agg_contents)
3116 bool ok;
3117 gcc_checking_assert (cs->callee
3118 && (cs != ie
3119 || jfunc->type != IPA_JF_CONST
3120 || !cgraph_node_for_jfunc (jfunc)
3121 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3122 ok = try_decrement_rdesc_refcount (jfunc);
3123 gcc_checking_assert (ok);
3126 return cs;
3129 /* Return the target to be used in cases of impossible devirtualization. IE
3130 and target (the latter can be NULL) are dumped when dumping is enabled. */
3132 tree
3133 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3135 if (dump_file)
3137 if (target)
3138 fprintf (dump_file,
3139 "Type inconsistent devirtualization: %s/%i->%s\n",
3140 ie->caller->name (), ie->caller->order,
3141 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3142 else
3143 fprintf (dump_file,
3144 "No devirtualization target in %s/%i\n",
3145 ie->caller->name (), ie->caller->order);
3147 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3148 cgraph_node::get_create (new_target);
3149 return new_target;
3152 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3153 call based on a formal parameter which is described by jump function JFUNC
3154 and if it can be determined, make it direct and return the direct edge.
3155 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3156 are relative to. */
3158 static struct cgraph_edge *
3159 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3160 struct ipa_jump_func *jfunc,
3161 struct ipa_node_params *new_root_info,
3162 struct ipa_polymorphic_call_context *ctx_ptr)
3164 tree binfo, target = NULL;
3165 bool speculative = false;
3166 bool updated = false;
3168 if (!flag_devirtualize)
3169 return NULL;
3171 /* If this is a call of a function parameter, restrict its type
3172 based on knowledge of the context. */
3173 if (ctx_ptr && !ie->indirect_info->by_ref)
3175 struct ipa_polymorphic_call_context ctx = *ctx_ptr;
3177 ctx.offset_by (ie->indirect_info->offset);
3179 if (ie->indirect_info->vptr_changed)
3180 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3181 ie->indirect_info->otr_type);
3183 updated = ie->indirect_info->context.combine_with
3184 (ctx, ie->indirect_info->otr_type);
3187 /* Try to do lookup via known virtual table pointer value. */
3188 if (!ie->indirect_info->by_ref
3189 && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
3191 tree vtable;
3192 unsigned HOST_WIDE_INT offset;
3193 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3194 ie->indirect_info->offset,
3195 true);
3196 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3198 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3199 vtable, offset);
3200 if (t)
3202 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3203 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3204 || !possible_polymorphic_call_target_p
3205 (ie, cgraph_node::get (t)))
3207 /* Do not speculate builtin_unreachable, it is stupid! */
3208 if (!ie->indirect_info->vptr_changed)
3209 target = ipa_impossible_devirt_target (ie, target);
3211 else
3213 target = t;
3214 speculative = ie->indirect_info->vptr_changed;
3220 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3222 if (binfo && TREE_CODE (binfo) != TREE_BINFO)
3224 struct ipa_polymorphic_call_context ctx (binfo,
3225 ie->indirect_info->otr_type,
3226 ie->indirect_info->offset);
3227 updated |= ie->indirect_info->context.combine_with
3228 (ctx, ie->indirect_info->otr_type);
3231 if (updated)
3233 ipa_polymorphic_call_context context (ie);
3234 vec <cgraph_node *>targets;
3235 bool final;
3237 targets = possible_polymorphic_call_targets
3238 (ie->indirect_info->otr_type,
3239 ie->indirect_info->otr_token,
3240 context, &final);
3241 if (final && targets.length () <= 1)
3243 if (targets.length () == 1)
3244 target = targets[0]->decl;
3245 else
3246 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3248 else if (!target && flag_devirtualize_speculatively
3249 && !ie->speculative && ie->maybe_hot_p ())
3251 cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3252 ie->indirect_info->otr_token,
3253 ie->indirect_info->context);
3254 if (n)
3256 target = n->decl;
3257 speculative = true;
3262 if (binfo && TREE_CODE (binfo) == TREE_BINFO)
3264 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3265 ie->indirect_info->otr_type);
3266 if (binfo)
3268 tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3269 binfo);
3270 if (t)
3272 target = t;
3273 speculative = false;
3278 if (target)
3280 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
3282 if (speculative)
3283 return NULL;
3284 target = ipa_impossible_devirt_target (ie, target);
3286 return ipa_make_edge_direct_to_target (ie, target, speculative);
3288 else
3289 return NULL;
3292 /* Update the param called notes associated with NODE when CS is being inlined,
3293 assuming NODE is (potentially indirectly) inlined into CS->callee.
3294 Moreover, if the callee is discovered to be constant, create a new cgraph
3295 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3296 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3298 static bool
3299 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3300 struct cgraph_node *node,
3301 vec<cgraph_edge *> *new_edges)
3303 struct ipa_edge_args *top;
3304 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3305 struct ipa_node_params *new_root_info;
3306 bool res = false;
3308 ipa_check_create_edge_args ();
3309 top = IPA_EDGE_REF (cs);
3310 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3311 ? cs->caller->global.inlined_to
3312 : cs->caller);
3314 for (ie = node->indirect_calls; ie; ie = next_ie)
3316 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3317 struct ipa_jump_func *jfunc;
3318 int param_index;
3320 next_ie = ie->next_callee;
3322 if (ici->param_index == -1)
3323 continue;
3325 /* We must check range due to calls with variable number of arguments: */
3326 if (ici->param_index >= ipa_get_cs_argument_count (top))
3328 ici->param_index = -1;
3329 continue;
3332 param_index = ici->param_index;
3333 jfunc = ipa_get_ith_jump_func (top, param_index);
3335 if (!flag_indirect_inlining)
3336 new_direct_edge = NULL;
3337 else if (ici->polymorphic)
3339 ipa_polymorphic_call_context *ctx;
3340 ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
3341 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3342 new_root_info,
3343 ctx);
3345 else
3346 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3347 new_root_info);
3348 /* If speculation was removed, then we need to do nothing. */
3349 if (new_direct_edge && new_direct_edge != ie)
3351 new_direct_edge->indirect_inlining_edge = 1;
3352 top = IPA_EDGE_REF (cs);
3353 res = true;
3355 else if (new_direct_edge)
3357 new_direct_edge->indirect_inlining_edge = 1;
3358 if (new_direct_edge->call_stmt)
3359 new_direct_edge->call_stmt_cannot_inline_p
3360 = !gimple_check_call_matching_types (
3361 new_direct_edge->call_stmt,
3362 new_direct_edge->callee->decl, false);
3363 if (new_edges)
3365 new_edges->safe_push (new_direct_edge);
3366 res = true;
3368 top = IPA_EDGE_REF (cs);
3370 else if (jfunc->type == IPA_JF_PASS_THROUGH
3371 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3373 if ((ici->agg_contents
3374 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3375 || (ici->polymorphic
3376 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3377 ici->param_index = -1;
3378 else
3379 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3381 else if (jfunc->type == IPA_JF_ANCESTOR)
3383 if ((ici->agg_contents
3384 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3385 || (ici->polymorphic
3386 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3387 ici->param_index = -1;
3388 else
3390 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3391 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3394 else
3395 /* Either we can find a destination for this edge now or never. */
3396 ici->param_index = -1;
3399 return res;
3402 /* Recursively traverse subtree of NODE (including node) made of inlined
3403 cgraph_edges when CS has been inlined and invoke
3404 update_indirect_edges_after_inlining on all nodes and
3405 update_jump_functions_after_inlining on all non-inlined edges that lead out
3406 of this subtree. Newly discovered indirect edges will be added to
3407 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3408 created. */
3410 static bool
3411 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3412 struct cgraph_node *node,
3413 vec<cgraph_edge *> *new_edges)
3415 struct cgraph_edge *e;
3416 bool res;
3418 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3420 for (e = node->callees; e; e = e->next_callee)
3421 if (!e->inline_failed)
3422 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3423 else
3424 update_jump_functions_after_inlining (cs, e);
3425 for (e = node->indirect_calls; e; e = e->next_callee)
3426 update_jump_functions_after_inlining (cs, e);
3428 return res;
3431 /* Combine two controlled uses counts as done during inlining. */
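/* Both counts include the call site being inlined, so e.g. combining three
   controlled uses in the caller with two in the callee gives
   3 + 2 - 1 = 4; IPA_UNDESCRIBED_USE in either operand is contagious.  */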
3433 static int
3434 combine_controlled_uses_counters (int c, int d)
3436 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3437 return IPA_UNDESCRIBED_USE;
3438 else
3439 return c + d - 1;
3442 /* Propagate the number of controlled uses from CS->callee to the new root of the
3443 tree of inlined nodes. */
3445 static void
3446 propagate_controlled_uses (struct cgraph_edge *cs)
3448 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3449 struct cgraph_node *new_root = cs->caller->global.inlined_to
3450 ? cs->caller->global.inlined_to : cs->caller;
3451 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3452 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3453 int count, i;
3455 count = MIN (ipa_get_cs_argument_count (args),
3456 ipa_get_param_count (old_root_info));
3457 for (i = 0; i < count; i++)
3459 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3460 struct ipa_cst_ref_desc *rdesc;
3462 if (jf->type == IPA_JF_PASS_THROUGH)
3464 int src_idx, c, d;
3465 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3466 c = ipa_get_controlled_uses (new_root_info, src_idx);
3467 d = ipa_get_controlled_uses (old_root_info, i);
3469 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3470 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3471 c = combine_controlled_uses_counters (c, d);
3472 ipa_set_controlled_uses (new_root_info, src_idx, c);
3473 if (c == 0 && new_root_info->ipcp_orig_node)
3475 struct cgraph_node *n;
3476 struct ipa_ref *ref;
3477 tree t = new_root_info->known_vals[src_idx];
3479 if (t && TREE_CODE (t) == ADDR_EXPR
3480 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3481 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3482 && (ref = new_root->find_reference (n, NULL, 0)))
3484 if (dump_file)
3485 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3486 "reference from %s/%i to %s/%i.\n",
3487 xstrdup (new_root->name ()),
3488 new_root->order,
3489 xstrdup (n->name ()), n->order);
3490 ref->remove_reference ();
3494 else if (jf->type == IPA_JF_CONST
3495 && (rdesc = jfunc_rdesc_usable (jf)))
3497 int d = ipa_get_controlled_uses (old_root_info, i);
3498 int c = rdesc->refcount;
3499 rdesc->refcount = combine_controlled_uses_counters (c, d);
3500 if (rdesc->refcount == 0)
3502 tree cst = ipa_get_jf_constant (jf);
3503 struct cgraph_node *n;
3504 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3505 && TREE_CODE (TREE_OPERAND (cst, 0))
3506 == FUNCTION_DECL);
3507 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3508 if (n)
3510 struct cgraph_node *clone;
3511 bool ok;
3512 ok = remove_described_reference (n, rdesc);
3513 gcc_checking_assert (ok);
3515 clone = cs->caller;
3516 while (clone->global.inlined_to
3517 && clone != rdesc->cs->caller
3518 && IPA_NODE_REF (clone)->ipcp_orig_node)
3520 struct ipa_ref *ref;
3521 ref = clone->find_reference (n, NULL, 0);
3522 if (ref)
3524 if (dump_file)
3525 fprintf (dump_file, "ipa-prop: Removing "
3526 "cloning-created reference "
3527 "from %s/%i to %s/%i.\n",
3528 xstrdup (clone->name ()),
3529 clone->order,
3530 xstrdup (n->name ()),
3531 n->order);
3532 ref->remove_reference ();
3534 clone = clone->callers->caller;
3541 for (i = ipa_get_param_count (old_root_info);
3542 i < ipa_get_cs_argument_count (args);
3543 i++)
3545 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3547 if (jf->type == IPA_JF_CONST)
3549 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3550 if (rdesc)
3551 rdesc->refcount = IPA_UNDESCRIBED_USE;
3553 else if (jf->type == IPA_JF_PASS_THROUGH)
3554 ipa_set_controlled_uses (new_root_info,
3555 jf->value.pass_through.formal_id,
3556 IPA_UNDESCRIBED_USE);
3560 /* Update jump functions and call note functions on inlining the call site CS.
3561 CS is expected to lead to a node already cloned by
3562 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3563 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3564 created. */
3566 bool
3567 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3568 vec<cgraph_edge *> *new_edges)
3570 bool changed;
3571 /* Do nothing if the preparation phase has not been carried out yet
3572 (i.e. during early inlining). */
3573 if (!ipa_node_params_vector.exists ())
3574 return false;
3575 gcc_assert (ipa_edge_args_vector);
3577 propagate_controlled_uses (cs);
3578 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3580 return changed;
3583 /* Frees all dynamically allocated structures that the argument info points
3584 to. */
3586 void
3587 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3589 vec_free (args->jump_functions);
3590 memset (args, 0, sizeof (*args));
3593 /* Free all ipa_edge_args structures. */
3595 void
3596 ipa_free_all_edge_args (void)
3598 int i;
3599 struct ipa_edge_args *args;
3601 if (!ipa_edge_args_vector)
3602 return;
3604 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3605 ipa_free_edge_args_substructures (args);
3607 vec_free (ipa_edge_args_vector);
3610 /* Frees all dynamically allocated structures that the param info points
3611 to. */
3613 void
3614 ipa_free_node_params_substructures (struct ipa_node_params *info)
3616 info->descriptors.release ();
3617 free (info->lattices);
3618 /* Lattice values and their sources are deallocated with their allocation
3619 pool. */
3620 info->known_vals.release ();
3621 memset (info, 0, sizeof (*info));
3624 /* Free all ipa_node_params structures. */
3626 void
3627 ipa_free_all_node_params (void)
3629 int i;
3630 struct ipa_node_params *info;
3632 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3633 ipa_free_node_params_substructures (info);
3635 ipa_node_params_vector.release ();
3638 /* Set the aggregate replacements of NODE to be AGGVALS. */
3640 void
3641 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3642 struct ipa_agg_replacement_value *aggvals)
3644 if (vec_safe_length (ipa_node_agg_replacements)
3645 <= (unsigned) symtab->cgraph_max_uid)
3646 vec_safe_grow_cleared (ipa_node_agg_replacements,
3647 symtab->cgraph_max_uid + 1);
3649 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3652 /* Hook that is called by cgraph.c when an edge is removed. */
3654 static void
3655 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3657 struct ipa_edge_args *args;
3659 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3660 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3661 return;
3663 args = IPA_EDGE_REF (cs);
3664 if (args->jump_functions)
3666 struct ipa_jump_func *jf;
3667 int i;
3668 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3670 struct ipa_cst_ref_desc *rdesc;
3671 try_decrement_rdesc_refcount (jf);
3672 if (jf->type == IPA_JF_CONST
3673 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3674 && rdesc->cs == cs)
3675 rdesc->cs = NULL;
3679 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3682 /* Hook that is called by cgraph.c when a node is removed. */
3684 static void
3685 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3687 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3688 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3689 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3690 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3691 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3694 /* Hook that is called by cgraph.c when an edge is duplicated. */
3696 static void
3697 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3698 __attribute__((unused)) void *data)
3700 struct ipa_edge_args *old_args, *new_args;
3701 unsigned int i;
3703 ipa_check_create_edge_args ();
3705 old_args = IPA_EDGE_REF (src);
3706 new_args = IPA_EDGE_REF (dst);
3708 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3709 if (old_args->polymorphic_call_contexts)
3710 new_args->polymorphic_call_contexts
3711 = vec_safe_copy (old_args->polymorphic_call_contexts);
3713 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3715 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3716 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3718 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3720 if (src_jf->type == IPA_JF_CONST)
3722 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3724 if (!src_rdesc)
3725 dst_jf->value.constant.rdesc = NULL;
3726 else if (src->caller == dst->caller)
3728 struct ipa_ref *ref;
3729 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3730 gcc_checking_assert (n);
3731 ref = src->caller->find_reference (n, src->call_stmt,
3732 src->lto_stmt_uid);
3733 gcc_checking_assert (ref);
3734 dst->caller->clone_reference (ref, ref->stmt);
3736 gcc_checking_assert (ipa_refdesc_pool);
3737 struct ipa_cst_ref_desc *dst_rdesc
3738 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3739 dst_rdesc->cs = dst;
3740 dst_rdesc->refcount = src_rdesc->refcount;
3741 dst_rdesc->next_duplicate = NULL;
3742 dst_jf->value.constant.rdesc = dst_rdesc;
3744 else if (src_rdesc->cs == src)
3746 struct ipa_cst_ref_desc *dst_rdesc;
3747 gcc_checking_assert (ipa_refdesc_pool);
3748 dst_rdesc
3749 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3750 dst_rdesc->cs = dst;
3751 dst_rdesc->refcount = src_rdesc->refcount;
3752 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3753 src_rdesc->next_duplicate = dst_rdesc;
3754 dst_jf->value.constant.rdesc = dst_rdesc;
3756 else
3758 struct ipa_cst_ref_desc *dst_rdesc;
3759 /* This can happen during inlining, when a JFUNC can refer to a
3760 reference taken in a function up in the tree of inline clones.
3761 We need to find the duplicate that refers to our tree of
3762 inline clones. */
3764 gcc_assert (dst->caller->global.inlined_to);
3765 for (dst_rdesc = src_rdesc->next_duplicate;
3766 dst_rdesc;
3767 dst_rdesc = dst_rdesc->next_duplicate)
3769 struct cgraph_node *top;
3770 top = dst_rdesc->cs->caller->global.inlined_to
3771 ? dst_rdesc->cs->caller->global.inlined_to
3772 : dst_rdesc->cs->caller;
3773 if (dst->caller->global.inlined_to == top)
3774 break;
3776 gcc_assert (dst_rdesc);
3777 dst_jf->value.constant.rdesc = dst_rdesc;
3780 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3781 && src->caller == dst->caller)
3783 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3784 ? dst->caller->global.inlined_to : dst->caller;
3785 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3786 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3788 int c = ipa_get_controlled_uses (root_info, idx);
3789 if (c != IPA_UNDESCRIBED_USE)
3791 c++;
3792 ipa_set_controlled_uses (root_info, idx, c);
3798 /* Hook that is called by cgraph.c when a node is duplicated. */
3800 static void
3801 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3802 ATTRIBUTE_UNUSED void *data)
3804 struct ipa_node_params *old_info, *new_info;
3805 struct ipa_agg_replacement_value *old_av, *new_av;
3807 ipa_check_create_node_params ();
3808 old_info = IPA_NODE_REF (src);
3809 new_info = IPA_NODE_REF (dst);
3811 new_info->descriptors = old_info->descriptors.copy ();
3812 new_info->lattices = NULL;
3813 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3815 new_info->analysis_done = old_info->analysis_done;
3816 new_info->node_enqueued = old_info->node_enqueued;
3818 old_av = ipa_get_agg_replacements_for_node (src);
3819 if (!old_av)
3820 return;
3822 new_av = NULL;
3823 while (old_av)
3825 struct ipa_agg_replacement_value *v;
3827 v = ggc_alloc<ipa_agg_replacement_value> ();
3828 memcpy (v, old_av, sizeof (*v));
3829 v->next = new_av;
3830 new_av = v;
3831 old_av = old_av->next;
3833 ipa_set_node_agg_value_chain (dst, new_av);
3837 /* Analyze a function newly added to the callgraph. */
3839 static void
3840 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3842 if (node->has_gimple_body_p ())
3843 ipa_analyze_node (node);
3846 /* Register our cgraph hooks if they are not already there. */
3848 void
3849 ipa_register_cgraph_hooks (void)
3851 if (!edge_removal_hook_holder)
3852 edge_removal_hook_holder =
3853 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3854 if (!node_removal_hook_holder)
3855 node_removal_hook_holder =
3856 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3857 if (!edge_duplication_hook_holder)
3858 edge_duplication_hook_holder =
3859 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3860 if (!node_duplication_hook_holder)
3861 node_duplication_hook_holder =
3862 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3863 function_insertion_hook_holder =
3864 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
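/* Illustrative sketch: other IPA passes can observe symbol-table events in
   the same way.  The holder returned by the registration call must be kept
   so the hook can later be removed; the example names below are
   hypothetical.  */

static struct cgraph_edge_hook_list *example_holder;

static void
example_edge_removal (struct cgraph_edge *cs ATTRIBUTE_UNUSED,
		      void *data ATTRIBUTE_UNUSED)
{
  /* React to the edge being removed.  */
}

static void
example_register (void)
{
  example_holder = symtab->add_edge_removal_hook (&example_edge_removal, NULL);
}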
3867 /* Unregister all our cgraph hooks. */
3869 static void
3870 ipa_unregister_cgraph_hooks (void)
3872 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3873 edge_removal_hook_holder = NULL;
3874 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3875 node_removal_hook_holder = NULL;
3876 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3877 edge_duplication_hook_holder = NULL;
3878 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3879 node_duplication_hook_holder = NULL;
3880 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3881 function_insertion_hook_holder = NULL;
3884 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3885 longer needed after ipa-cp. */
3887 void
3888 ipa_free_all_structures_after_ipa_cp (void)
3890 if (!optimize)
3892 ipa_free_all_edge_args ();
3893 ipa_free_all_node_params ();
3894 free_alloc_pool (ipcp_sources_pool);
3895 free_alloc_pool (ipcp_values_pool);
3896 free_alloc_pool (ipcp_agg_lattice_pool);
3897 ipa_unregister_cgraph_hooks ();
3898 if (ipa_refdesc_pool)
3899 free_alloc_pool (ipa_refdesc_pool);
3903 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3904 longer needed after indirect inlining. */
3906 void
3907 ipa_free_all_structures_after_iinln (void)
3909 ipa_free_all_edge_args ();
3910 ipa_free_all_node_params ();
3911 ipa_unregister_cgraph_hooks ();
3912 if (ipcp_sources_pool)
3913 free_alloc_pool (ipcp_sources_pool);
3914 if (ipcp_values_pool)
3915 free_alloc_pool (ipcp_values_pool);
3916 if (ipcp_agg_lattice_pool)
3917 free_alloc_pool (ipcp_agg_lattice_pool);
3918 if (ipa_refdesc_pool)
3919 free_alloc_pool (ipa_refdesc_pool);
3922 /* Print ipa_tree_map data structures of function NODE to F. */
3925 void
3926 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3928 int i, count;
3929 struct ipa_node_params *info;
3931 if (!node->definition)
3932 return;
3933 info = IPA_NODE_REF (node);
3934 fprintf (f, " function %s/%i parameter descriptors:\n",
3935 node->name (), node->order);
3936 count = ipa_get_param_count (info);
3937 for (i = 0; i < count; i++)
3939 int c;
3941 fprintf (f, " ");
3942 ipa_dump_param (f, info, i);
3943 if (ipa_is_param_used (info, i))
3944 fprintf (f, " used");
3945 c = ipa_get_controlled_uses (info, i);
3946 if (c == IPA_UNDESCRIBED_USE)
3947 fprintf (f, " undescribed_use");
3948 else
3949 fprintf (f, " controlled_uses=%i", c);
3950 fprintf (f, "\n");
3954 /* Print ipa_tree_map data structures of all functions in the
3955 callgraph to F. */
3957 void
3958 ipa_print_all_params (FILE * f)
3960 struct cgraph_node *node;
3962 fprintf (f, "\nFunction parameters:\n");
3963 FOR_EACH_FUNCTION (node)
3964 ipa_print_node_params (f, node);
3967 /* Return a heap-allocated vector containing formal parameters of FNDECL. */
3969 vec<tree>
3970 ipa_get_vector_of_formal_parms (tree fndecl)
3972 vec<tree> args;
3973 int count;
3974 tree parm;
3976 gcc_assert (!flag_wpa);
3977 count = count_formal_params (fndecl);
3978 args.create (count);
3979 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3980 args.quick_push (parm);
3982 return args;
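/* Illustrative use (hypothetical caller): the returned vector is owned by
   the caller and must be released.  */

static void
example_print_parms (FILE *f, tree fndecl)
{
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
  int i;
  tree parm;

  FOR_EACH_VEC_ELT (parms, i, parm)
    print_generic_expr (f, parm, 0);
  parms.release ();
}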
3985 /* Return a heap-allocated vector containing types of formal parameters of
3986 function type FNTYPE. */
3988 vec<tree>
3989 ipa_get_vector_of_formal_parm_types (tree fntype)
3991 vec<tree> types;
3992 int count = 0;
3993 tree t;
3995 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3996 count++;
3998 types.create (count);
3999 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
4000 types.quick_push (TREE_VALUE (t));
4002 return types;
4005 /* Modify the function declaration FNDECL and its type according to the plan in
4006 ADJUSTMENTS. It also sets the base fields of individual adjustment
4007 structures to reflect the actual parameters being modified, which are
4008 determined by the base_index field. */
4010 void
4011 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
4013 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
4014 tree orig_type = TREE_TYPE (fndecl);
4015 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
4017 /* The following test is an ugly hack; some functions simply don't have any
4018 arguments in their type. This is probably a bug elsewhere, but we must cope. */
4019 bool care_for_types = (old_arg_types != NULL_TREE);
4020 bool last_parm_void;
4021 vec<tree> otypes;
4022 if (care_for_types)
4024 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
4025 == void_type_node);
4026 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
4027 if (last_parm_void)
4028 gcc_assert (oparms.length () + 1 == otypes.length ());
4029 else
4030 gcc_assert (oparms.length () == otypes.length ());
4032 else
4034 last_parm_void = false;
4035 otypes.create (0);
4038 int len = adjustments.length ();
4039 tree *link = &DECL_ARGUMENTS (fndecl);
4040 tree new_arg_types = NULL;
4041 for (int i = 0; i < len; i++)
4043 struct ipa_parm_adjustment *adj;
4044 gcc_assert (link);
4046 adj = &adjustments[i];
4047 tree parm;
4048 if (adj->op == IPA_PARM_OP_NEW)
4049 parm = NULL;
4050 else
4051 parm = oparms[adj->base_index];
4052 adj->base = parm;
4054 if (adj->op == IPA_PARM_OP_COPY)
4056 if (care_for_types)
4057 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
4058 new_arg_types);
4059 *link = parm;
4060 link = &DECL_CHAIN (parm);
4062 else if (adj->op != IPA_PARM_OP_REMOVE)
4064 tree new_parm;
4065 tree ptype;
4067 if (adj->by_ref)
4068 ptype = build_pointer_type (adj->type);
4069 else
4071 ptype = adj->type;
4072 if (is_gimple_reg_type (ptype))
4074 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
4075 if (TYPE_ALIGN (ptype) < malign)
4076 ptype = build_aligned_type (ptype, malign);
4080 if (care_for_types)
4081 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
4083 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
4084 ptype);
4085 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
4086 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
4087 DECL_ARTIFICIAL (new_parm) = 1;
4088 DECL_ARG_TYPE (new_parm) = ptype;
4089 DECL_CONTEXT (new_parm) = fndecl;
4090 TREE_USED (new_parm) = 1;
4091 DECL_IGNORED_P (new_parm) = 1;
4092 layout_decl (new_parm, 0);
4094 if (adj->op == IPA_PARM_OP_NEW)
4095 adj->base = NULL;
4096 else
4097 adj->base = parm;
4098 adj->new_decl = new_parm;
4100 *link = new_parm;
4101 link = &DECL_CHAIN (new_parm);
4105 *link = NULL_TREE;
4107 tree new_reversed = NULL;
4108 if (care_for_types)
4110 new_reversed = nreverse (new_arg_types);
4111 if (last_parm_void)
4113 if (new_reversed)
4114 TREE_CHAIN (new_arg_types) = void_list_node;
4115 else
4116 new_reversed = void_list_node;
4120 /* Use copy_node to preserve as much as possible from the original type
4121 (debug info, attribute lists etc.). The exception is that METHOD_TYPEs
4122 must have a THIS argument; when we are asked to remove it, we need to
4123 build a new FUNCTION_TYPE instead. */
4125 tree new_type = NULL;
4126 if (TREE_CODE (orig_type) != METHOD_TYPE
4127 || (adjustments[0].op == IPA_PARM_OP_COPY
4128 && adjustments[0].base_index == 0))
4130 new_type = build_distinct_type_copy (orig_type);
4131 TYPE_ARG_TYPES (new_type) = new_reversed;
4133 else
4135 new_type
4136 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
4137 new_reversed));
4138 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
4139 DECL_VINDEX (fndecl) = NULL_TREE;
4142 /* When signature changes, we need to clear builtin info. */
4143 if (DECL_BUILT_IN (fndecl))
4145 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
4146 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4149 TREE_TYPE (fndecl) = new_type;
4150 DECL_VIRTUAL_P (fndecl) = 0;
4151 DECL_LANG_SPECIFIC (fndecl) = NULL;
4152 otypes.release ();
4153 oparms.release ();
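/* Illustrative sketch: a hypothetical adjustment vector that keeps the first
   and third of three original parameters and removes the second, then
   applies it to FNDECL.  Field and enumerator names follow struct
   ipa_parm_adjustment in ipa-prop.h.  A real pass must also rewrite the
   function body and its call sites (see ipa_modify_call_arguments below).  */

static void
example_drop_second_parm (tree fndecl)
{
  struct ipa_parm_adjustment adj;
  ipa_parm_adjustment_vec adjustments;

  adjustments.create (3);
  memset (&adj, 0, sizeof (adj));
  adj.op = IPA_PARM_OP_COPY;
  adj.base_index = 0;
  adjustments.quick_push (adj);
  adj.op = IPA_PARM_OP_REMOVE;
  adj.base_index = 1;
  adjustments.quick_push (adj);
  adj.op = IPA_PARM_OP_COPY;
  adj.base_index = 2;
  adjustments.quick_push (adj);
  ipa_modify_formal_parameters (fndecl, adjustments);
  adjustments.release ();
}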
4156 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
4157 If this is a directly recursive call, CS must be NULL. Otherwise it must
4158 contain the corresponding call graph edge. */
4160 void
4161 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4162 ipa_parm_adjustment_vec adjustments)
4164 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4165 vec<tree> vargs;
4166 vec<tree, va_gc> **debug_args = NULL;
4167 gimple new_stmt;
4168 gimple_stmt_iterator gsi, prev_gsi;
4169 tree callee_decl;
4170 int i, len;
4172 len = adjustments.length ();
4173 vargs.create (len);
4174 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4175 current_node->remove_stmt_references (stmt);
4177 gsi = gsi_for_stmt (stmt);
4178 prev_gsi = gsi;
4179 gsi_prev (&prev_gsi);
4180 for (i = 0; i < len; i++)
4182 struct ipa_parm_adjustment *adj;
4184 adj = &adjustments[i];
4186 if (adj->op == IPA_PARM_OP_COPY)
4188 tree arg = gimple_call_arg (stmt, adj->base_index);
4190 vargs.quick_push (arg);
4192 else if (adj->op != IPA_PARM_OP_REMOVE)
4194 tree expr, base, off;
4195 location_t loc;
4196 unsigned int deref_align = 0;
4197 bool deref_base = false;
4199 /* When we create a new parameter out of the value of the old one, we can
4200 do the following kinds of transformations:
4202 - A scalar passed by reference is converted to a scalar passed by
4203 value. (adj->by_ref is false and the type of the original
4204 actual argument is a pointer to a scalar).
4206 - A part of an aggregate is passed instead of the whole aggregate.
4207 The part can be passed either by value or by reference, this is
4208 determined by value of adj->by_ref. Moreover, the code below
4209 handles both situations when the original aggregate is passed by
4210 value (its type is not a pointer) and when it is passed by
4211 reference (it is a pointer to an aggregate).
4213 When the new argument is passed by reference (adj->by_ref is true)
4214 it must be a part of an aggregate and therefore we form it by
4215 simply taking the address of a reference inside the original
4216 aggregate. */
4218 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4219 base = gimple_call_arg (stmt, adj->base_index);
4220 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4221 : EXPR_LOCATION (base);
4223 if (TREE_CODE (base) != ADDR_EXPR
4224 && POINTER_TYPE_P (TREE_TYPE (base)))
4225 off = build_int_cst (adj->alias_ptr_type,
4226 adj->offset / BITS_PER_UNIT);
4227 else
4229 HOST_WIDE_INT base_offset;
4230 tree prev_base;
4231 bool addrof;
4233 if (TREE_CODE (base) == ADDR_EXPR)
4235 base = TREE_OPERAND (base, 0);
4236 addrof = true;
4238 else
4239 addrof = false;
4240 prev_base = base;
4241 base = get_addr_base_and_unit_offset (base, &base_offset);
4242 /* Aggregate arguments can have non-invariant addresses. */
4243 if (!base)
4245 base = build_fold_addr_expr (prev_base);
4246 off = build_int_cst (adj->alias_ptr_type,
4247 adj->offset / BITS_PER_UNIT);
4249 else if (TREE_CODE (base) == MEM_REF)
4251 if (!addrof)
4253 deref_base = true;
4254 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4256 off = build_int_cst (adj->alias_ptr_type,
4257 base_offset
4258 + adj->offset / BITS_PER_UNIT);
4259 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4260 off);
4261 base = TREE_OPERAND (base, 0);
4263 else
4265 off = build_int_cst (adj->alias_ptr_type,
4266 base_offset
4267 + adj->offset / BITS_PER_UNIT);
4268 base = build_fold_addr_expr (base);
4272 if (!adj->by_ref)
4274 tree type = adj->type;
4275 unsigned int align;
4276 unsigned HOST_WIDE_INT misalign;
4278 if (deref_base)
4280 align = deref_align;
4281 misalign = 0;
4283 else
4285 get_pointer_alignment_1 (base, &align, &misalign);
4286 if (TYPE_ALIGN (type) > align)
4287 align = TYPE_ALIGN (type);
4289 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4290 * BITS_PER_UNIT);
4291 misalign = misalign & (align - 1);
4292 if (misalign != 0)
4293 align = (misalign & -misalign);
4294 if (align < TYPE_ALIGN (type))
4295 type = build_aligned_type (type, align);
4296 base = force_gimple_operand_gsi (&gsi, base,
4297 true, NULL, true, GSI_SAME_STMT);
4298 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4299 /* If expr is not a valid gimple call argument, emit
4300 a load into a temporary. */
4301 if (is_gimple_reg_type (TREE_TYPE (expr)))
4303 gimple tem = gimple_build_assign (NULL_TREE, expr);
4304 if (gimple_in_ssa_p (cfun))
4306 gimple_set_vuse (tem, gimple_vuse (stmt));
4307 expr = make_ssa_name (TREE_TYPE (expr), tem);
4309 else
4310 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4311 gimple_assign_set_lhs (tem, expr);
4312 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4315 else
4317 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4318 expr = build_fold_addr_expr (expr);
4319 expr = force_gimple_operand_gsi (&gsi, expr,
4320 true, NULL, true, GSI_SAME_STMT);
4322 vargs.quick_push (expr);
4324 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4326 unsigned int ix;
4327 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4328 gimple def_temp;
4330 arg = gimple_call_arg (stmt, adj->base_index);
4331 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4333 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4334 continue;
4335 arg = fold_convert_loc (gimple_location (stmt),
4336 TREE_TYPE (origin), arg);
4338 if (debug_args == NULL)
4339 debug_args = decl_debug_args_insert (callee_decl);
4340 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4341 if (ddecl == origin)
4343 ddecl = (**debug_args)[ix + 1];
4344 break;
4346 if (ddecl == NULL)
4348 ddecl = make_node (DEBUG_EXPR_DECL);
4349 DECL_ARTIFICIAL (ddecl) = 1;
4350 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4351 DECL_MODE (ddecl) = DECL_MODE (origin);
4353 vec_safe_push (*debug_args, origin);
4354 vec_safe_push (*debug_args, ddecl);
4356 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4357 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4361 if (dump_file && (dump_flags & TDF_DETAILS))
4363 fprintf (dump_file, "replacing stmt:");
4364 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4367 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4368 vargs.release ();
4369 if (gimple_call_lhs (stmt))
4370 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4372 gimple_set_block (new_stmt, gimple_block (stmt));
4373 if (gimple_has_location (stmt))
4374 gimple_set_location (new_stmt, gimple_location (stmt));
4375 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4376 gimple_call_copy_flags (new_stmt, stmt);
4377 if (gimple_in_ssa_p (cfun))
4379 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4380 if (gimple_vdef (stmt))
4382 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4383 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4387 if (dump_file && (dump_flags & TDF_DETAILS))
4389 fprintf (dump_file, "with stmt:");
4390 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4391 fprintf (dump_file, "\n");
4393 gsi_replace (&gsi, new_stmt, true);
4394 if (cs)
4395 cs->set_call_stmt (new_stmt);
4398 current_node->record_stmt_references (gsi_stmt (gsi));
4399 gsi_prev (&gsi);
4401 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
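/* Illustrative pairing with the function above: after changing the formal
   parameters of a function, a pass applies the same adjustment vector to
   every statement calling it, e.g. for a call graph edge CS:

     ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);

   with the first argument being NULL only for directly recursive calls.  */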
4404 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4405 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4406 specifies whether the function should care about type incompatibility between
4407 the current and new expressions. If it is false, the function will leave
4408 incompatibility issues to the caller. Return true iff the expression
4409 was modified. */
4411 bool
4412 ipa_modify_expr (tree *expr, bool convert,
4413 ipa_parm_adjustment_vec adjustments)
4415 struct ipa_parm_adjustment *cand
4416 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4417 if (!cand)
4418 return false;
4420 tree src;
4421 if (cand->by_ref)
4422 src = build_simple_mem_ref (cand->new_decl);
4423 else
4424 src = cand->new_decl;
4426 if (dump_file && (dump_flags & TDF_DETAILS))
4428 fprintf (dump_file, "About to replace expr ");
4429 print_generic_expr (dump_file, *expr, 0);
4430 fprintf (dump_file, " with ");
4431 print_generic_expr (dump_file, src, 0);
4432 fprintf (dump_file, "\n");
4435 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4437 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4438 *expr = vce;
4440 else
4441 *expr = src;
4442 return true;
4445 /* If T is an SSA_NAME, return NULL if it is not a default def or
4446 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4447 the base variable is always returned, regardless of whether it is a
4448 default def. Return T if it is not an SSA_NAME. */
4450 static tree
4451 get_ssa_base_param (tree t, bool ignore_default_def)
4453 if (TREE_CODE (t) == SSA_NAME)
4455 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4456 return SSA_NAME_VAR (t);
4457 else
4458 return NULL_TREE;
4460 return t;
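/* Illustrative behavior of get_ssa_base_param: for the default definition of
   a PARM_DECL p (the SSA name carrying p's value on entry to the function),
   it returns p itself; for other SSA names it returns NULL_TREE unless
   IGNORE_DEFAULT_DEF is set; and a tree that is not an SSA_NAME is returned
   unchanged.  */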
4463 /* Given an expression, return an adjustment entry specifying the
4464 transformation to be done on EXPR. If no suitable adjustment entry
4465 was found, returns NULL.
4467 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4468 default def, otherwise bail on them.
4470 If CONVERT is non-NULL, this function will set *CONVERT if the
4471 expression provided is a component reference. ADJUSTMENTS is the
4472 adjustments vector. */
4474 ipa_parm_adjustment *
4475 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4476 ipa_parm_adjustment_vec adjustments,
4477 bool ignore_default_def)
4479 if (TREE_CODE (**expr) == BIT_FIELD_REF
4480 || TREE_CODE (**expr) == IMAGPART_EXPR
4481 || TREE_CODE (**expr) == REALPART_EXPR)
4483 *expr = &TREE_OPERAND (**expr, 0);
4484 if (convert)
4485 *convert = true;
4488 HOST_WIDE_INT offset, size, max_size;
4489 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4490 if (!base || size == -1 || max_size == -1)
4491 return NULL;
4493 if (TREE_CODE (base) == MEM_REF)
4495 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4496 base = TREE_OPERAND (base, 0);
4499 base = get_ssa_base_param (base, ignore_default_def);
4500 if (!base || TREE_CODE (base) != PARM_DECL)
4501 return NULL;
4503 struct ipa_parm_adjustment *cand = NULL;
4504 unsigned int len = adjustments.length ();
4505 for (unsigned i = 0; i < len; i++)
4507 struct ipa_parm_adjustment *adj = &adjustments[i];
4509 if (adj->base == base
4510 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4512 cand = adj;
4513 break;
4517 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4518 return NULL;
4519 return cand;
4522 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4524 static bool
4525 index_in_adjustments_multiple_times_p (int base_index,
4526 ipa_parm_adjustment_vec adjustments)
4528 int i, len = adjustments.length ();
4529 bool one = false;
4531 for (i = 0; i < len; i++)
4533 struct ipa_parm_adjustment *adj;
4534 adj = &adjustments[i];
4536 if (adj->base_index == base_index)
4538 if (one)
4539 return true;
4540 else
4541 one = true;
4544 return false;
4548 /* Return adjustments that should have the same effect on function parameters
4549 and call arguments as if they were first changed according to adjustments in
4550 INNER and then by adjustments in OUTER. */
4552 ipa_parm_adjustment_vec
4553 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4554 ipa_parm_adjustment_vec outer)
4556 int i, outlen = outer.length ();
4557 int inlen = inner.length ();
4558 int removals = 0;
4559 ipa_parm_adjustment_vec adjustments, tmp;
4561 tmp.create (inlen);
4562 for (i = 0; i < inlen; i++)
4564 struct ipa_parm_adjustment *n;
4565 n = &inner[i];
4567 if (n->op == IPA_PARM_OP_REMOVE)
4568 removals++;
4569 else
4571 /* FIXME: Handling of new arguments is not implemented yet. */
4572 gcc_assert (n->op != IPA_PARM_OP_NEW);
4573 tmp.quick_push (*n);
4577 adjustments.create (outlen + removals);
4578 for (i = 0; i < outlen; i++)
4580 struct ipa_parm_adjustment r;
4581 struct ipa_parm_adjustment *out = &outer[i];
4582 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4584 memset (&r, 0, sizeof (r));
4585 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4586 if (out->op == IPA_PARM_OP_REMOVE)
4588 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4590 r.op = IPA_PARM_OP_REMOVE;
4591 adjustments.quick_push (r);
4593 continue;
4595 else
4597 /* FIXME: Handling of new arguments is not implemented yet. */
4598 gcc_assert (out->op != IPA_PARM_OP_NEW);
4601 r.base_index = in->base_index;
4602 r.type = out->type;
4604 /* FIXME: Create nonlocal value too. */
4606 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4607 r.op = IPA_PARM_OP_COPY;
4608 else if (in->op == IPA_PARM_OP_COPY)
4609 r.offset = out->offset;
4610 else if (out->op == IPA_PARM_OP_COPY)
4611 r.offset = in->offset;
4612 else
4613 r.offset = in->offset + out->offset;
4614 adjustments.quick_push (r);
4617 for (i = 0; i < inlen; i++)
4619 struct ipa_parm_adjustment *n = &inner[i];
4621 if (n->op == IPA_PARM_OP_REMOVE)
4622 adjustments.quick_push (*n);
4625 tmp.release ();
4626 return adjustments;
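/* Illustrative worked example: let the original parameters be (a, b, c).
   If INNER copies a and c and removes b, and OUTER, written against the
   resulting (a, c) signature, copies its first parameter and removes its
   second, then the combined vector copies a, removes c and carries over the
   inner removal of b, so applying it to (a, b, c) directly is equivalent to
   applying INNER and then OUTER.  */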
4629 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4630 human-friendly way, assuming they are meant to be applied to FNDECL. */
4632 void
4633 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4634 tree fndecl)
4636 int i, len = adjustments.length ();
4637 bool first = true;
4638 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4640 fprintf (file, "IPA param adjustments: ");
4641 for (i = 0; i < len; i++)
4643 struct ipa_parm_adjustment *adj;
4644 adj = &adjustments[i];
4646 if (!first)
4647 fprintf (file, " ");
4648 else
4649 first = false;
4651 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4652 print_generic_expr (file, parms[adj->base_index], 0);
4653 if (adj->base)
4655 fprintf (file, ", base: ");
4656 print_generic_expr (file, adj->base, 0);
4658 if (adj->new_decl)
4660 fprintf (file, ", new_decl: ");
4661 print_generic_expr (file, adj->new_decl, 0);
4663 if (adj->new_ssa_base)
4665 fprintf (file, ", new_ssa_base: ");
4666 print_generic_expr (file, adj->new_ssa_base, 0);
4669 if (adj->op == IPA_PARM_OP_COPY)
4670 fprintf (file, ", copy_param");
4671 else if (adj->op == IPA_PARM_OP_REMOVE)
4672 fprintf (file, ", remove_param");
4673 else
4674 fprintf (file, ", offset %li", (long) adj->offset);
4675 if (adj->by_ref)
4676 fprintf (file, ", by_ref");
4677 print_node_brief (file, ", type: ", adj->type, 0);
4678 fprintf (file, "\n");
4680 parms.release ();
4683 /* Dump the linked list of aggregate replacement values AV to F. */
4685 void
4686 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4688 bool comma = false;
4689 fprintf (f, " Aggregate replacements:");
4690 for (; av; av = av->next)
4692 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4693 av->index, av->offset);
4694 print_generic_expr (f, av->value, 0);
4695 comma = true;
4697 fprintf (f, "\n");
4700 /* Stream out jump function JUMP_FUNC to OB. */
4702 static void
4703 ipa_write_jump_function (struct output_block *ob,
4704 struct ipa_jump_func *jump_func)
4706 struct ipa_agg_jf_item *item;
4707 struct bitpack_d bp;
4708 int i, count;
4710 streamer_write_uhwi (ob, jump_func->type);
4711 switch (jump_func->type)
4713 case IPA_JF_UNKNOWN:
4714 break;
4715 case IPA_JF_KNOWN_TYPE:
4716 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4717 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4718 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4719 break;
4720 case IPA_JF_CONST:
4721 gcc_assert (
4722 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4723 stream_write_tree (ob, jump_func->value.constant.value, true);
4724 break;
4725 case IPA_JF_PASS_THROUGH:
4726 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4727 if (jump_func->value.pass_through.operation == NOP_EXPR)
4729 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4730 bp = bitpack_create (ob->main_stream);
4731 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4732 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4733 streamer_write_bitpack (&bp);
4735 else
4737 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4738 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4740 break;
4741 case IPA_JF_ANCESTOR:
4742 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4743 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4744 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4745 bp = bitpack_create (ob->main_stream);
4746 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4747 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4748 streamer_write_bitpack (&bp);
4749 break;
4752 count = vec_safe_length (jump_func->agg.items);
4753 streamer_write_uhwi (ob, count);
4754 if (count)
4756 bp = bitpack_create (ob->main_stream);
4757 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4758 streamer_write_bitpack (&bp);
4761 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4763 streamer_write_uhwi (ob, item->offset);
4764 stream_write_tree (ob, item->value, true);
4768 /* Read in jump function JUMP_FUNC from IB. */
4770 static void
4771 ipa_read_jump_function (struct lto_input_block *ib,
4772 struct ipa_jump_func *jump_func,
4773 struct cgraph_edge *cs,
4774 struct data_in *data_in)
4776 enum jump_func_type jftype;
4777 enum tree_code operation;
4778 int i, count;
4780 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4781 switch (jftype)
4783 case IPA_JF_UNKNOWN:
4784 jump_func->type = IPA_JF_UNKNOWN;
4785 break;
4786 case IPA_JF_KNOWN_TYPE:
4788 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4789 tree base_type = stream_read_tree (ib, data_in);
4790 tree component_type = stream_read_tree (ib, data_in);
4792 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4793 break;
4795 case IPA_JF_CONST:
4796 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4797 break;
4798 case IPA_JF_PASS_THROUGH:
4799 operation = (enum tree_code) streamer_read_uhwi (ib);
4800 if (operation == NOP_EXPR)
4802 int formal_id = streamer_read_uhwi (ib);
4803 struct bitpack_d bp = streamer_read_bitpack (ib);
4804 bool agg_preserved = bp_unpack_value (&bp, 1);
4805 bool type_preserved = bp_unpack_value (&bp, 1);
4806 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4807 type_preserved);
4809 else
4811 tree operand = stream_read_tree (ib, data_in);
4812 int formal_id = streamer_read_uhwi (ib);
4813 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4814 operation);
4816 break;
4817 case IPA_JF_ANCESTOR:
4819 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4820 tree type = stream_read_tree (ib, data_in);
4821 int formal_id = streamer_read_uhwi (ib);
4822 struct bitpack_d bp = streamer_read_bitpack (ib);
4823 bool agg_preserved = bp_unpack_value (&bp, 1);
4824 bool type_preserved = bp_unpack_value (&bp, 1);
4826 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4827 type_preserved);
4828 break;
4832 count = streamer_read_uhwi (ib);
4833 vec_alloc (jump_func->agg.items, count);
4834 if (count)
4836 struct bitpack_d bp = streamer_read_bitpack (ib);
4837 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4839 for (i = 0; i < count; i++)
4841 struct ipa_agg_jf_item item;
4842 item.offset = streamer_read_uhwi (ib);
4843 item.value = stream_read_tree (ib, data_in);
4844 jump_func->agg.items->quick_push (item);
4848 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
4849 CS that are relevant to indirect inlining. */
4851 static void
4852 ipa_write_indirect_edge_info (struct output_block *ob,
4853 struct cgraph_edge *cs)
4855 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4856 struct bitpack_d bp;
4858 streamer_write_hwi (ob, ii->param_index);
4859 bp = bitpack_create (ob->main_stream);
4860 bp_pack_value (&bp, ii->polymorphic, 1);
4861 bp_pack_value (&bp, ii->agg_contents, 1);
4862 bp_pack_value (&bp, ii->member_ptr, 1);
4863 bp_pack_value (&bp, ii->by_ref, 1);
4864 bp_pack_value (&bp, ii->vptr_changed, 1);
4865 streamer_write_bitpack (&bp);
4866 if (ii->agg_contents || ii->polymorphic)
4867 streamer_write_hwi (ob, ii->offset);
4868 else
4869 gcc_assert (ii->offset == 0);
4871 if (ii->polymorphic)
4873 streamer_write_hwi (ob, ii->otr_token);
4874 stream_write_tree (ob, ii->otr_type, true);
4875 ii->context.stream_out (ob);
4879 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
4880 CS that are relevant to indirect inlining. */
4882 static void
4883 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4884 struct data_in *data_in,
4885 struct cgraph_edge *cs)
4887 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4888 struct bitpack_d bp;
4890 ii->param_index = (int) streamer_read_hwi (ib);
4891 bp = streamer_read_bitpack (ib);
4892 ii->polymorphic = bp_unpack_value (&bp, 1);
4893 ii->agg_contents = bp_unpack_value (&bp, 1);
4894 ii->member_ptr = bp_unpack_value (&bp, 1);
4895 ii->by_ref = bp_unpack_value (&bp, 1);
4896 ii->vptr_changed = bp_unpack_value (&bp, 1);
4897 if (ii->agg_contents || ii->polymorphic)
4898 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4899 else
4900 ii->offset = 0;
4901 if (ii->polymorphic)
4903 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4904 ii->otr_type = stream_read_tree (ib, data_in);
4905 ii->context.stream_in (ib, data_in);
4909 /* Stream out NODE info to OB. */
4911 static void
4912 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4914 int node_ref;
4915 lto_symtab_encoder_t encoder;
4916 struct ipa_node_params *info = IPA_NODE_REF (node);
4917 int j;
4918 struct cgraph_edge *e;
4919 struct bitpack_d bp;
4921 encoder = ob->decl_state->symtab_node_encoder;
4922 node_ref = lto_symtab_encoder_encode (encoder, node);
4923 streamer_write_uhwi (ob, node_ref);
4925 streamer_write_uhwi (ob, ipa_get_param_count (info));
4926 for (j = 0; j < ipa_get_param_count (info); j++)
4927 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4928 bp = bitpack_create (ob->main_stream);
4929 gcc_assert (info->analysis_done
4930 || ipa_get_param_count (info) == 0);
4931 gcc_assert (!info->node_enqueued);
4932 gcc_assert (!info->ipcp_orig_node);
4933 for (j = 0; j < ipa_get_param_count (info); j++)
4934 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4935 streamer_write_bitpack (&bp);
4936 for (j = 0; j < ipa_get_param_count (info); j++)
4937 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4938 for (e = node->callees; e; e = e->next_callee)
4940 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4942 streamer_write_uhwi (ob,
4943 ipa_get_cs_argument_count (args) * 2
4944 + (args->polymorphic_call_contexts != NULL));
4945 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4947 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4948 if (args->polymorphic_call_contexts != NULL)
4949 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4952 for (e = node->indirect_calls; e; e = e->next_callee)
4954 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4956 streamer_write_uhwi (ob,
4957 ipa_get_cs_argument_count (args) * 2
4958 + (args->polymorphic_call_contexts != NULL));
4959 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4961 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4962 if (args->polymorphic_call_contexts != NULL)
4963 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4965 ipa_write_indirect_edge_info (ob, e);
4969 /* Stream in NODE info from IB. */
4971 static void
4972 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4973 struct data_in *data_in)
4975 struct ipa_node_params *info = IPA_NODE_REF (node);
4976 int k;
4977 struct cgraph_edge *e;
4978 struct bitpack_d bp;
4980 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4982 for (k = 0; k < ipa_get_param_count (info); k++)
4983 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4985 bp = streamer_read_bitpack (ib);
4986 if (ipa_get_param_count (info) != 0)
4987 info->analysis_done = true;
4988 info->node_enqueued = false;
4989 for (k = 0; k < ipa_get_param_count (info); k++)
4990 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4991 for (k = 0; k < ipa_get_param_count (info); k++)
4992 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4993 for (e = node->callees; e; e = e->next_callee)
4995 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4996 int count = streamer_read_uhwi (ib);
4997 bool contexts_computed = count & 1;
4998 count /= 2;
5000 if (!count)
5001 continue;
5002 vec_safe_grow_cleared (args->jump_functions, count);
5003 if (contexts_computed)
5004 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5006 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5008 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5009 data_in);
5010 if (contexts_computed)
5011 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5014 for (e = node->indirect_calls; e; e = e->next_callee)
5016 struct ipa_edge_args *args = IPA_EDGE_REF (e);
5017 int count = streamer_read_uhwi (ib);
5018 bool contexts_computed = count & 1;
5019 count /= 2;
5021 if (count)
5023 vec_safe_grow_cleared (args->jump_functions, count);
5024 if (contexts_computed)
5025 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
5026 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
5028 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5029 data_in);
5030 if (contexts_computed)
5031 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
5034 ipa_read_indirect_edge_info (ib, data_in, e);
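/* Illustrative: the per-edge header used above packs the argument count
   together with a flag saying whether polymorphic call contexts follow.
   For instance, three arguments with contexts are written as
   3 * 2 + 1 = 7, and the reader recovers contexts_computed = 7 & 1 = 1 and
   count = 7 / 2 = 3.  */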
5038 /* Write jump functions for all functions in the current partition. */
5040 void
5041 ipa_prop_write_jump_functions (void)
5043 struct cgraph_node *node;
5044 struct output_block *ob;
5045 unsigned int count = 0;
5046 lto_symtab_encoder_iterator lsei;
5047 lto_symtab_encoder_t encoder;
5050 if (!ipa_node_params_vector.exists ())
5051 return;
5053 ob = create_output_block (LTO_section_jump_functions);
5054 encoder = ob->decl_state->symtab_node_encoder;
5055 ob->symbol = NULL;
5056 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5057 lsei_next_function_in_partition (&lsei))
5059 node = lsei_cgraph_node (lsei);
5060 if (node->has_gimple_body_p ()
5061 && IPA_NODE_REF (node) != NULL)
5062 count++;
5065 streamer_write_uhwi (ob, count);
5067 /* Process all of the functions. */
5068 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5069 lsei_next_function_in_partition (&lsei))
5071 node = lsei_cgraph_node (lsei);
5072 if (node->has_gimple_body_p ()
5073 && IPA_NODE_REF (node) != NULL)
5074 ipa_write_node_info (ob, node);
5076 streamer_write_char_stream (ob->main_stream, 0);
5077 produce_asm (ob, NULL);
5078 destroy_output_block (ob);
5081 /* Read section in file FILE_DATA of length LEN with data DATA. */
5083 static void
5084 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5085 size_t len)
5087 const struct lto_function_header *header =
5088 (const struct lto_function_header *) data;
5089 const int cfg_offset = sizeof (struct lto_function_header);
5090 const int main_offset = cfg_offset + header->cfg_size;
5091 const int string_offset = main_offset + header->main_size;
5092 struct data_in *data_in;
5093 unsigned int i;
5094 unsigned int count;
5096 lto_input_block ib_main ((const char *) data + main_offset,
5097 header->main_size);
5099 data_in =
5100 lto_data_in_create (file_data, (const char *) data + string_offset,
5101 header->string_size, vNULL);
5102 count = streamer_read_uhwi (&ib_main);
5104 for (i = 0; i < count; i++)
5106 unsigned int index;
5107 struct cgraph_node *node;
5108 lto_symtab_encoder_t encoder;
5110 index = streamer_read_uhwi (&ib_main);
5111 encoder = file_data->symtab_node_encoder;
5112 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5113 index));
5114 gcc_assert (node->definition);
5115 ipa_read_node_info (&ib_main, node, data_in);
5117 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5118 len);
5119 lto_data_in_delete (data_in);
5122 /* Read ipcp jump functions. */
5124 void
5125 ipa_prop_read_jump_functions (void)
5127 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5128 struct lto_file_decl_data *file_data;
5129 unsigned int j = 0;
5131 ipa_check_create_node_params ();
5132 ipa_check_create_edge_args ();
5133 ipa_register_cgraph_hooks ();
5135 while ((file_data = file_data_vec[j++]))
5137 size_t len;
5138 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5140 if (data)
5141 ipa_prop_read_section (file_data, data, len);
5145 /* After merging units, we can get a mismatch in argument counts.
5146 Also decl merging might have rendered parameter lists obsolete.
5147 Also compute called_with_variable_arg info. */
5149 void
5150 ipa_update_after_lto_read (void)
5152 ipa_check_create_node_params ();
5153 ipa_check_create_edge_args ();
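/* Stream out the aggregate value replacement chain for NODE to OB.  */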
5156 void
5157 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5159 int node_ref;
5160 unsigned int count = 0;
5161 lto_symtab_encoder_t encoder;
5162 struct ipa_agg_replacement_value *aggvals, *av;
5164 aggvals = ipa_get_agg_replacements_for_node (node);
5165 encoder = ob->decl_state->symtab_node_encoder;
5166 node_ref = lto_symtab_encoder_encode (encoder, node);
5167 streamer_write_uhwi (ob, node_ref);
5169 for (av = aggvals; av; av = av->next)
5170 count++;
5171 streamer_write_uhwi (ob, count);
5173 for (av = aggvals; av; av = av->next)
5175 struct bitpack_d bp;
5177 streamer_write_uhwi (ob, av->offset);
5178 streamer_write_uhwi (ob, av->index);
5179 stream_write_tree (ob, av->value, true);
5181 bp = bitpack_create (ob->main_stream);
5182 bp_pack_value (&bp, av->by_ref, 1);
5183 streamer_write_bitpack (&bp);
5187 /* Stream in the aggregate value replacement chain for NODE from IB. */
5189 static void
5190 read_agg_replacement_chain (struct lto_input_block *ib,
5191 struct cgraph_node *node,
5192 struct data_in *data_in)
5194 struct ipa_agg_replacement_value *aggvals = NULL;
5195 unsigned int count, i;
5197 count = streamer_read_uhwi (ib);
5198 for (i = 0; i < count; i++)
5200 struct ipa_agg_replacement_value *av;
5201 struct bitpack_d bp;
5203 av = ggc_alloc<ipa_agg_replacement_value> ();
5204 av->offset = streamer_read_uhwi (ib);
5205 av->index = streamer_read_uhwi (ib);
5206 av->value = stream_read_tree (ib, data_in);
5207 bp = streamer_read_bitpack (ib);
5208 av->by_ref = bp_unpack_value (&bp, 1);
5209 av->next = aggvals;
5210 aggvals = av;
5212 ipa_set_node_agg_value_chain (node, aggvals);
5217 /* Write all aggregate replacements for nodes in the set. */
5217 void
5218 ipa_prop_write_all_agg_replacement (void)
5220 struct cgraph_node *node;
5221 struct output_block *ob;
5222 unsigned int count = 0;
5223 lto_symtab_encoder_iterator lsei;
5224 lto_symtab_encoder_t encoder;
5226 if (!ipa_node_agg_replacements)
5227 return;
5229 ob = create_output_block (LTO_section_ipcp_transform);
5230 encoder = ob->decl_state->symtab_node_encoder;
5231 ob->symbol = NULL;
5232 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5233 lsei_next_function_in_partition (&lsei))
5235 node = lsei_cgraph_node (lsei);
5236 if (node->has_gimple_body_p ()
5237 && ipa_get_agg_replacements_for_node (node) != NULL)
5238 count++;
5241 streamer_write_uhwi (ob, count);
5243 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5244 lsei_next_function_in_partition (&lsei))
5246 node = lsei_cgraph_node (lsei);
5247 if (node->has_gimple_body_p ()
5248 && ipa_get_agg_replacements_for_node (node) != NULL)
5249 write_agg_replacement_chain (ob, node);
5251 streamer_write_char_stream (ob->main_stream, 0);
5252 produce_asm (ob, NULL);
5253 destroy_output_block (ob);
5256 /* Read replacements section in file FILE_DATA of length LEN with data
5257 DATA. */
5259 static void
5260 read_replacements_section (struct lto_file_decl_data *file_data,
5261 const char *data,
5262 size_t len)
5264 const struct lto_function_header *header =
5265 (const struct lto_function_header *) data;
5266 const int cfg_offset = sizeof (struct lto_function_header);
5267 const int main_offset = cfg_offset + header->cfg_size;
5268 const int string_offset = main_offset + header->main_size;
5269 struct data_in *data_in;
5270 unsigned int i;
5271 unsigned int count;
5273 lto_input_block ib_main ((const char *) data + main_offset,
5274 header->main_size);
5276 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5277 header->string_size, vNULL);
5278 count = streamer_read_uhwi (&ib_main);
5280 for (i = 0; i < count; i++)
5282 unsigned int index;
5283 struct cgraph_node *node;
5284 lto_symtab_encoder_t encoder;
5286 index = streamer_read_uhwi (&ib_main);
5287 encoder = file_data->symtab_node_encoder;
5288 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5289 index));
5290 gcc_assert (node->definition);
5291 read_agg_replacement_chain (&ib_main, node, data_in);
5293 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5294 len);
5295 lto_data_in_delete (data_in);
5298 /* Read IPA-CP aggregate replacements. */
5300 void
5301 ipa_prop_read_all_agg_replacement (void)
5303 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5304 struct lto_file_decl_data *file_data;
5305 unsigned int j = 0;
5307 while ((file_data = file_data_vec[j++]))
5309 size_t len;
5310 const char *data = lto_get_section_data (file_data,
5311 LTO_section_ipcp_transform,
5312 NULL, &len);
5313 if (data)
5314 read_replacements_section (file_data, data, len);
5318 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5319 NODE. */
5321 static void
5322 adjust_agg_replacement_values (struct cgraph_node *node,
5323 struct ipa_agg_replacement_value *aggval)
5325 struct ipa_agg_replacement_value *v;
5326 int i, c = 0, d = 0, *adj;
5328 if (!node->clone.combined_args_to_skip)
5329 return;
5331 for (v = aggval; v; v = v->next)
5333 gcc_assert (v->index >= 0);
5334 if (c < v->index)
5335 c = v->index;
5337 c++;
5339 adj = XALLOCAVEC (int, c);
5340 for (i = 0; i < c; i++)
5341 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5343 adj[i] = -1;
5344 d++;
5346 else
5347 adj[i] = i - d;
5349 for (v = aggval; v; v = v->next)
5350 v->index = adj[v->index];
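/* Illustrative worked example: if the clone skips parameter 1 and AGGVAL
   holds values for parameters 0, 2 and 3, then ADJ becomes
   { 0, -1, 1, 2 } and the values are renumbered to 0, 1 and 2
   respectively.  */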
5353 /* Dominator walker driving the ipcp modification phase. */
5355 class ipcp_modif_dom_walker : public dom_walker
5357 public:
5358 ipcp_modif_dom_walker (struct func_body_info *fbi,
5359 vec<ipa_param_descriptor> descs,
5360 struct ipa_agg_replacement_value *av,
5361 bool *sc, bool *cc)
5362 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5363 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5365 virtual void before_dom_children (basic_block);
5367 private:
5368 struct func_body_info *m_fbi;
5369 vec<ipa_param_descriptor> m_descriptors;
5370 struct ipa_agg_replacement_value *m_aggval;
5371 bool *m_something_changed, *m_cfg_changed;
5374 void
5375 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5377 gimple_stmt_iterator gsi;
5378 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5380 struct ipa_agg_replacement_value *v;
5381 gimple stmt = gsi_stmt (gsi);
5382 tree rhs, val, t;
5383 HOST_WIDE_INT offset, size;
5384 int index;
5385 bool by_ref, vce;
5387 if (!gimple_assign_load_p (stmt))
5388 continue;
5389 rhs = gimple_assign_rhs1 (stmt);
5390 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5391 continue;
5393 vce = false;
5394 t = rhs;
5395 while (handled_component_p (t))
5397 /* V_C_E can do things like convert an array of integers to one
5398 bigger integer and similar things we do not handle below. */
5399 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5401 vce = true;
5402 break;
5404 t = TREE_OPERAND (t, 0);
5406 if (vce)
5407 continue;
5409 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5410 &offset, &size, &by_ref))
5411 continue;
5412 for (v = m_aggval; v; v = v->next)
5413 if (v->index == index
5414 && v->offset == offset)
5415 break;
5416 if (!v
5417 || v->by_ref != by_ref
5418 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5419 continue;
5421 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5422 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5424 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5425 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5426 else if (TYPE_SIZE (TREE_TYPE (rhs))
5427 == TYPE_SIZE (TREE_TYPE (v->value)))
5428 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5429 else
5431 if (dump_file)
5433 fprintf (dump_file, " const ");
5434 print_generic_expr (dump_file, v->value, 0);
5435 fprintf (dump_file, " can't be converted to type of ");
5436 print_generic_expr (dump_file, rhs, 0);
5437 fprintf (dump_file, "\n");
5439 continue;
5442 else
5443 val = v->value;
5445 if (dump_file && (dump_flags & TDF_DETAILS))
5447 fprintf (dump_file, "Modifying stmt:\n ");
5448 print_gimple_stmt (dump_file, stmt, 0, 0);
5450 gimple_assign_set_rhs_from_tree (&gsi, val);
5451 update_stmt (stmt);
5453 if (dump_file && (dump_flags & TDF_DETAILS))
5455 fprintf (dump_file, "into:\n ");
5456 print_gimple_stmt (dump_file, stmt, 0, 0);
5457 fprintf (dump_file, "\n");
5460 *m_something_changed = true;
5461 if (maybe_clean_eh_stmt (stmt)
5462 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5463 *m_cfg_changed = true;
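/* Illustrative effect of the walker above: given an aggregate replacement
   such as "0[32]=42" for parameter 0 (in the notation of
   ipa_dump_agg_replacement_values), a load x = param_0->f, where field f
   lives at bit offset 32, is rewritten into x = 42, provided by_ref and the
   size of the loaded type match.  */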
5468 /* The IPA-CP transformation phase, propagating discovered aggregate
5469 values into the body of NODE. */
5470 unsigned int
5471 ipcp_transform_function (struct cgraph_node *node)
5473 vec<ipa_param_descriptor> descriptors = vNULL;
5474 struct func_body_info fbi;
5475 struct ipa_agg_replacement_value *aggval;
5476 int param_count;
5477 bool cfg_changed = false, something_changed = false;
5479 gcc_checking_assert (cfun);
5480 gcc_checking_assert (current_function_decl);
5482 if (dump_file)
5483 fprintf (dump_file, "Modification phase of node %s/%i\n",
5484 node->name (), node->order);
5486 aggval = ipa_get_agg_replacements_for_node (node);
5487 if (!aggval)
5488 return 0;
5489 param_count = count_formal_params (node->decl);
5490 if (param_count == 0)
5491 return 0;
5492 adjust_agg_replacement_values (node, aggval);
5493 if (dump_file)
5494 ipa_dump_agg_replacement_values (dump_file, aggval);
5496 fbi.node = node;
5497 fbi.info = NULL;
5498 fbi.bb_infos = vNULL;
5499 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5500 fbi.param_count = param_count;
5501 fbi.aa_walked = 0;
5503 descriptors.safe_grow_cleared (param_count);
5504 ipa_populate_param_decls (node, descriptors);
5505 calculate_dominance_info (CDI_DOMINATORS);
5506 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5507 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5509 int i;
5510 struct ipa_bb_info *bi;
5511 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5512 free_ipa_bb_info (bi);
5513 fbi.bb_infos.release ();
5514 free_dominance_info (CDI_DOMINATORS);
5515 (*ipa_node_agg_replacements)[node->uid] = NULL;
5516 descriptors.release ();
5518 if (!something_changed)
5519 return 0;
5520 else if (cfg_changed)
5521 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5522 else
5523 return TODO_update_ssa_only_virtuals;