/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;

  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements we have already walked while analyzing this
     function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc),
                                              32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
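/* For illustration: taking the address of a function in an argument, as in a
   hypothetical call

     bar (&foo);

   produces a constant jump function for which the code above allocates an
   rdesc with refcount one.  The rdesc keeps track of the IPA reference to foo
   taken by the call, so the reference can be removed if all such calls are
   later redirected or eliminated.  */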
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}
/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
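/* For illustration, a store the above function flags as a possible VMT
   pointer store looks roughly like the following GIMPLE statement (a
   hypothetical, simplified dump):

     this_1(D)->_vptr.A = &_ZTV1A + 16;

   i.e. an assignment to a DECL_VIRTUAL_P component, whereas a store to an
   ordinary non-pointer scalar field is filtered out when strict aliasing
   is in effect.  */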
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have
     an easy tie on this, so punt on all non-pure cdtors.
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call);
               block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}
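/* A hypothetical example of the case the inline-stack walk above guards
   against: if a constructor A::A() that calls a virtual method is inlined
   into a function constructing a derived object, the BLOCK tree of CALL
   carries the constructor as an abstract origin, and the dynamic type of
   THIS must be assumed to change even though the enclosing function itself
   is not a cdtor.  */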
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}
/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
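/* The two functions above implement, in effect, a cache keyed on the
   dominator tree: for instance, once a parameter has been seen to be
   modified in some block, all blocks dominated by it can inherit that
   status here instead of repeating the walk_aliased_vdefs traversal (a
   sketch of the intent, not an additional guarantee).  */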
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far, which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}
/* Like the previous function, but without the param_analysis_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters that is used across
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

   if (obj_2(D) != 0B)
     goto <bb 3>;
   else
     goto <bb 4>;

   <bb 3>:
   iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
   # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
   D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
   return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
          || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
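/* For reference, the layout this predicate matches corresponds to how C++
   pointers to member functions are commonly lowered (e.g. in the Itanium
   C++ ABI), roughly:

     struct
     {
       void (*__pfn) ();   method pointer or vtable offset
       long __delta;       this-pointer adjustment
     };

   the field names above being illustrative only.  */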
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
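/* An illustration of the list discipline above, with hypothetical offsets
   and sizes in bits: given existing entries <0, 32> and <64, 32>, a new
   entry <32, 32> is linked in between them; a second <0, 32> sets
   *ALREADY_THERE because the older store is dead; and <16, 32> overlaps
   the first entry only partially and therefore yields NULL.  */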
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}
1674 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1675 in ARG is filled in with constant values. ARG can either be an aggregate
1676 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1677 aggregate. JFUNC is the jump function into which the constants are
1678 subsequently stored. */
1680 static void
1681 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1682 struct ipa_jump_func *jfunc)
1684 struct ipa_known_agg_contents_list *list = NULL;
1685 int item_count = 0, const_count = 0;
1686 HOST_WIDE_INT arg_offset, arg_size;
1687 gimple_stmt_iterator gsi;
1688 tree arg_base;
1689 bool check_ref, by_ref;
1690 ao_ref r;
1692 /* The function operates in three stages. First, we prepare check_ref, r,
1693 arg_base and arg_offset based on what is actually passed as an actual
1694 argument. */
1696 if (POINTER_TYPE_P (arg_type))
1698 by_ref = true;
1699 if (TREE_CODE (arg) == SSA_NAME)
1701 tree type_size;
1702 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1703 return;
1704 check_ref = true;
1705 arg_base = arg;
1706 arg_offset = 0;
1707 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1708 arg_size = tree_to_uhwi (type_size);
1709 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1711 else if (TREE_CODE (arg) == ADDR_EXPR)
1713 HOST_WIDE_INT arg_max_size;
1715 arg = TREE_OPERAND (arg, 0);
1716 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1717 &arg_max_size);
1718 if (arg_max_size == -1
1719 || arg_max_size != arg_size
1720 || arg_offset < 0)
1721 return;
1722 if (DECL_P (arg_base))
1724 check_ref = false;
1725 ao_ref_init (&r, arg_base);
1727 else
1728 return;
1730 else
1731 return;
1733 else
1735 HOST_WIDE_INT arg_max_size;
1737 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1739 by_ref = false;
1740 check_ref = false;
1741 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1742 &arg_max_size);
1743 if (arg_max_size == -1
1744 || arg_max_size != arg_size
1745 || arg_offset < 0)
1746 return;
1748 ao_ref_init (&r, arg);
1751 /* Second stage walks back the BB, looks at individual statements and as long
1752 as it is confident of how the statements affect contents of the
1753 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1754 describing it. */
1755 gsi = gsi_for_stmt (call);
1756 gsi_prev (&gsi);
1757 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1759 struct ipa_known_agg_contents_list *n, **p;
1760 gimple stmt = gsi_stmt (gsi);
1761 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1762 tree lhs, rhs, lhs_base;
1764 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1765 continue;
1766 if (!gimple_assign_single_p (stmt))
1767 break;
1769 lhs = gimple_assign_lhs (stmt);
1770 rhs = gimple_assign_rhs1 (stmt);
1771 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1772 || TREE_CODE (lhs) == BIT_FIELD_REF
1773 || contains_bitfld_component_ref_p (lhs))
1774 break;
1776 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1777 &lhs_max_size);
1778 if (lhs_max_size == -1
1779 || lhs_max_size != lhs_size)
1780 break;
1782 if (check_ref)
1784 if (TREE_CODE (lhs_base) != MEM_REF
1785 || TREE_OPERAND (lhs_base, 0) != arg_base
1786 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1787 break;
1789 else if (lhs_base != arg_base)
1791 if (DECL_P (lhs_base))
1792 continue;
1793 else
1794 break;
1797 bool already_there = false;
1798 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1799 &already_there);
1800 if (!p)
1801 break;
1802 if (already_there)
1803 continue;
1805 rhs = get_ssa_def_if_simple_copy (rhs);
1806 n = XALLOCA (struct ipa_known_agg_contents_list);
1807 n->size = lhs_size;
1808 n->offset = lhs_offset;
1809 if (is_gimple_ip_invariant (rhs))
1811 n->constant = rhs;
1812 const_count++;
1814 else
1815 n->constant = NULL_TREE;
1816 n->next = *p;
1817 *p = n;
1819 item_count++;
1820 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1821 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1822 break;
1825 /* The third stage just goes over the list and creates an appropriate vector
1826 of ipa_agg_jf_item structures out of it, of course only if there are
1827 any known constants to begin with. */
1829 if (const_count)
1831 jfunc->agg.by_ref = by_ref;
1832 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
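/* As a purely hypothetical illustration of the walk above: for a caller

     struct S s;
     s.a = 1;
     s.b = 2;
     bar (&s);

   walking backwards from the call records the constants 1 and 2 together
   with their offsets, provided no statement in between may clobber s, and
   the third stage then turns the list into the aggregate jump function.  */

/* Return the type of the I-th formal parameter of the function corresponding
   to call graph edge E.  The type is taken from the declared argument types
   of the callee or, for indirect edges, from the type of the call statement;
   return NULL if it cannot be determined.  */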
1836 static tree
1837 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1839 int n;
1840 tree type = (e->callee
1841 ? TREE_TYPE (e->callee->decl)
1842 : gimple_call_fntype (e->call_stmt));
1843 tree t = TYPE_ARG_TYPES (type);
1845 for (n = 0; n < i; n++)
1847 if (!t)
1848 break;
1849 t = TREE_CHAIN (t);
1851 if (t)
1852 return TREE_VALUE (t);
1853 if (!e->callee)
1854 return NULL;
1855 t = DECL_ARGUMENTS (e->callee->decl);
1856 for (n = 0; n < i; n++)
1858 if (!t)
1859 return NULL;
1860 t = TREE_CHAIN (t);
1862 if (t)
1863 return TREE_TYPE (t);
1864 return NULL;
1867 /* Compute jump function for all arguments of callsite CS and insert the
1868 information in the jump_functions array in the ipa_edge_args corresponding
1869 to this callsite. */
1871 static void
1872 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1873 struct cgraph_edge *cs)
1875 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1876 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1877 gimple call = cs->call_stmt;
1878 int n, arg_num = gimple_call_num_args (call);
1880 if (arg_num == 0 || args->jump_functions)
1881 return;
1882 vec_safe_grow_cleared (args->jump_functions, arg_num);
1884 if (gimple_call_internal_p (call))
1885 return;
1886 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1887 return;
1889 for (n = 0; n < arg_num; n++)
1891 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1892 tree arg = gimple_call_arg (call, n);
1893 tree param_type = ipa_get_callee_param_type (cs, n);
1895 if (is_gimple_ip_invariant (arg))
1896 ipa_set_jf_constant (jfunc, arg, cs);
1897 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1898 && TREE_CODE (arg) == PARM_DECL)
1900 int index = ipa_get_param_decl_index (info, arg);
1902 gcc_assert (index >= 0);
1903 /* Aggregate passed by value, check for pass-through, otherwise we
1904 will attempt to fill in the aggregate contents later in this
1905 loop. */
1906 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1908 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1909 continue;
1912 else if (TREE_CODE (arg) == SSA_NAME)
1914 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1916 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1917 if (index >= 0)
1919 bool agg_p, type_p;
1920 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1921 if (param_type && POINTER_TYPE_P (param_type))
1922 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1923 call, jfunc);
1924 else
1925 type_p = false;
1926 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1927 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1928 type_p);
1931 else
1933 gimple stmt = SSA_NAME_DEF_STMT (arg);
1934 if (is_gimple_assign (stmt))
1935 compute_complex_assign_jump_func (fbi, info, jfunc,
1936 call, stmt, arg, param_type);
1937 else if (gimple_code (stmt) == GIMPLE_PHI)
1938 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1939 call, stmt, param_type);
1942 else
1943 compute_known_type_jump_func (arg, jfunc, call,
1944 param_type
1945 && POINTER_TYPE_P (param_type)
1946 ? TREE_TYPE (param_type)
1947 : NULL);
1949 /* If ARG is a pointer, we cannot use its type to determine the type of the
1950 aggregate passed (because type conversions are ignored in gimple). Usually we
1951 can safely get the type from the function declaration, but in case of K&R
1952 prototypes or variadic functions we can try our luck with the type of the
1953 pointer passed. TODO: Since we look for actual initialization of the memory
1954 object, we may do better to work out the type from the stores we find. */
1955 if (!param_type)
1956 param_type = TREE_TYPE (arg);
1958 if ((jfunc->type != IPA_JF_PASS_THROUGH
1959 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1960 && (jfunc->type != IPA_JF_ANCESTOR
1961 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1962 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1963 || POINTER_TYPE_P (param_type)))
1964 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
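/* As a hypothetical illustration, for a call site

     callee (i, 7, p);

   where i and p are formal parameters of the caller, the loop above would
   typically produce a simple pass-through jump function for the first
   argument, a constant jump function for the second, and a pass-through,
   ancestor or known-type jump function for the third, depending on what is
   known about p at this point.  */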
1968 /* Compute jump functions for all edges - both direct and indirect - outgoing
1969 from BB. */
1971 static void
1972 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1974 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1975 int i;
1976 struct cgraph_edge *cs;
1978 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1980 struct cgraph_node *callee = cs->callee;
1982 if (callee)
1984 callee = callee->ultimate_alias_target ();
1985 /* We do not need to bother analyzing calls to unknown functions
1986 unless they may become known during lto/whopr. */
1987 if (!callee->definition && !flag_lto)
1988 continue;
1990 ipa_compute_jump_functions_for_edge (fbi, cs);
1994 /* If STMT looks like a statement loading a value from a member pointer formal
1995 parameter, return that parameter and store the offset of the field to
1996 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1997 might be clobbered). If USE_DELTA, then we look for a use of the delta
1998 field rather than the pfn. */
2000 static tree
2001 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2002 HOST_WIDE_INT *offset_p)
2004 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2006 if (!gimple_assign_single_p (stmt))
2007 return NULL_TREE;
2009 rhs = gimple_assign_rhs1 (stmt);
2010 if (TREE_CODE (rhs) == COMPONENT_REF)
2012 ref_field = TREE_OPERAND (rhs, 1);
2013 rhs = TREE_OPERAND (rhs, 0);
2015 else
2016 ref_field = NULL_TREE;
2017 if (TREE_CODE (rhs) != MEM_REF)
2018 return NULL_TREE;
2019 rec = TREE_OPERAND (rhs, 0);
2020 if (TREE_CODE (rec) != ADDR_EXPR)
2021 return NULL_TREE;
2022 rec = TREE_OPERAND (rec, 0);
2023 if (TREE_CODE (rec) != PARM_DECL
2024 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2025 return NULL_TREE;
2026 ref_offset = TREE_OPERAND (rhs, 1);
2028 if (use_delta)
2029 fld = delta_field;
2030 else
2031 fld = ptr_field;
2032 if (offset_p)
2033 *offset_p = int_bit_position (fld);
2035 if (ref_field)
2037 if (integer_nonzerop (ref_offset))
2038 return NULL_TREE;
2039 return ref_field == fld ? rec : NULL_TREE;
2041 else
2042 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2043 : NULL_TREE;
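/* Statements matched by the above look like (names hypothetical)

     f$__pfn_24 = f.__pfn;

   or, once the accesses have been lowered,

     f$__pfn_24 = MEM[(struct  *)&f];

   where f is a PARM_DECL of a member pointer type; the PARM_DECL is
   returned and the bit position of the loaded field is stored in
   *OFFSET_P.  */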
2046 /* Returns true iff T is an SSA_NAME defined by a statement. */
2048 static bool
2049 ipa_is_ssa_with_stmt_def (tree t)
2051 if (TREE_CODE (t) == SSA_NAME
2052 && !SSA_NAME_IS_DEFAULT_DEF (t))
2053 return true;
2054 else
2055 return false;
2058 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2059 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2060 indirect call graph edge. */
2062 static struct cgraph_edge *
2063 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2065 struct cgraph_edge *cs;
2067 cs = node->get_edge (stmt);
2068 cs->indirect_info->param_index = param_index;
2069 cs->indirect_info->agg_contents = 0;
2070 cs->indirect_info->member_ptr = 0;
2071 return cs;
2074 /* Analyze the CALL and examine uses of formal parameters of the caller
2075 FBI->node (described by FBI->info). Currently it checks
2077 whether the call calls a pointer that is a formal parameter and if so, the
2078 parameter is marked with the called flag and an indirect call graph edge
2079 describing the call is created. This is very simple for ordinary pointers
2080 represented in SSA but not-so-nice when it comes to member pointers. The
2081 ugly part of this function does nothing more than trying to match the
2082 pattern of such a call. An example of such a pattern is the gimple dump
2083 below, the call is on the last line:
2085 <bb 2>:
2086 f$__delta_5 = f.__delta;
2087 f$__pfn_24 = f.__pfn;
2089 or
2090 <bb 2>:
2091 f$__delta_5 = MEM[(struct *)&f];
2092 f$__pfn_24 = MEM[(struct *)&f + 4B];
2094 and a few lines below:
2096 <bb 5>
2097 D.2496_3 = (int) f$__pfn_24;
2098 D.2497_4 = D.2496_3 & 1;
2099 if (D.2497_4 != 0)
2100 goto <bb 3>;
2101 else
2102 goto <bb 4>;
2104 <bb 6>:
2105 D.2500_7 = (unsigned int) f$__delta_5;
2106 D.2501_8 = &S + D.2500_7;
2107 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2108 D.2503_10 = *D.2502_9;
2109 D.2504_12 = f$__pfn_24 + -1;
2110 D.2505_13 = (unsigned int) D.2504_12;
2111 D.2506_14 = D.2503_10 + D.2505_13;
2112 D.2507_15 = *D.2506_14;
2113 iftmp.11_16 = (String:: *) D.2507_15;
2115 <bb 7>:
2116 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2117 D.2500_19 = (unsigned int) f$__delta_5;
2118 D.2508_20 = &S + D.2500_19;
2119 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2121 Such patterns are results of simple calls to a member pointer:
2123 int doprinting (int (MyString::* f)(int) const)
2125 MyString S ("somestring");
2127 return (S.*f)(4);
2130 Moreover, the function also looks for called pointers loaded from aggregates
2131 passed by value or reference. */
2133 static void
2134 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2135 tree target)
2137 struct ipa_node_params *info = fbi->info;
2138 HOST_WIDE_INT offset;
2139 bool by_ref;
2141 if (SSA_NAME_IS_DEFAULT_DEF (target))
2143 tree var = SSA_NAME_VAR (target);
2144 int index = ipa_get_param_decl_index (info, var);
2145 if (index >= 0)
2146 ipa_note_param_call (fbi->node, index, call);
2147 return;
2150 int index;
2151 gimple def = SSA_NAME_DEF_STMT (target);
2152 if (gimple_assign_single_p (def)
2153 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2154 gimple_assign_rhs1 (def), &index, &offset,
2155 NULL, &by_ref))
2157 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2158 cs->indirect_info->offset = offset;
2159 cs->indirect_info->agg_contents = 1;
2160 cs->indirect_info->by_ref = by_ref;
2161 return;
2164 /* Now we need to try to match the complex pattern of calling a member
2165 pointer. */
2166 if (gimple_code (def) != GIMPLE_PHI
2167 || gimple_phi_num_args (def) != 2
2168 || !POINTER_TYPE_P (TREE_TYPE (target))
2169 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2170 return;
2172 /* First, we need to check whether one of these is a load from a member
2173 pointer that is a parameter to this function. */
2174 tree n1 = PHI_ARG_DEF (def, 0);
2175 tree n2 = PHI_ARG_DEF (def, 1);
2176 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2177 return;
2178 gimple d1 = SSA_NAME_DEF_STMT (n1);
2179 gimple d2 = SSA_NAME_DEF_STMT (n2);
2181 tree rec;
2182 basic_block bb, virt_bb;
2183 basic_block join = gimple_bb (def);
2184 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2186 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2187 return;
2189 bb = EDGE_PRED (join, 0)->src;
2190 virt_bb = gimple_bb (d2);
2192 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2194 bb = EDGE_PRED (join, 1)->src;
2195 virt_bb = gimple_bb (d1);
2197 else
2198 return;
2200 /* Second, we need to check that the basic blocks are laid out in the way
2201 corresponding to the pattern. */
2203 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2204 || single_pred (virt_bb) != bb
2205 || single_succ (virt_bb) != join)
2206 return;
2208 /* Third, let's see that the branching is done depending on the least
2209 significant bit of the pfn. */
2211 gimple branch = last_stmt (bb);
2212 if (!branch || gimple_code (branch) != GIMPLE_COND)
2213 return;
2215 if ((gimple_cond_code (branch) != NE_EXPR
2216 && gimple_cond_code (branch) != EQ_EXPR)
2217 || !integer_zerop (gimple_cond_rhs (branch)))
2218 return;
2220 tree cond = gimple_cond_lhs (branch);
2221 if (!ipa_is_ssa_with_stmt_def (cond))
2222 return;
2224 def = SSA_NAME_DEF_STMT (cond);
2225 if (!is_gimple_assign (def)
2226 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2227 || !integer_onep (gimple_assign_rhs2 (def)))
2228 return;
2230 cond = gimple_assign_rhs1 (def);
2231 if (!ipa_is_ssa_with_stmt_def (cond))
2232 return;
2234 def = SSA_NAME_DEF_STMT (cond);
2236 if (is_gimple_assign (def)
2237 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2239 cond = gimple_assign_rhs1 (def);
2240 if (!ipa_is_ssa_with_stmt_def (cond))
2241 return;
2242 def = SSA_NAME_DEF_STMT (cond);
2245 tree rec2;
2246 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2247 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2248 == ptrmemfunc_vbit_in_delta),
2249 NULL);
2250 if (rec != rec2)
2251 return;
2253 index = ipa_get_param_decl_index (info, rec);
2254 if (index >= 0
2255 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2257 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2258 cs->indirect_info->offset = offset;
2259 cs->indirect_info->agg_contents = 1;
2260 cs->indirect_info->member_ptr = 1;
2263 return;
2266 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2267 object referenced in the expression is a formal parameter of the caller
2268 FBI->node (described by FBI->info), create a call note for the
2269 statement. */
2271 static void
2272 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2273 gimple call, tree target)
2275 tree obj = OBJ_TYPE_REF_OBJECT (target);
2276 int index;
2277 HOST_WIDE_INT anc_offset;
2279 if (!flag_devirtualize)
2280 return;
2282 if (TREE_CODE (obj) != SSA_NAME)
2283 return;
2285 struct ipa_node_params *info = fbi->info;
2286 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2288 struct ipa_jump_func jfunc;
2289 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2290 return;
2292 anc_offset = 0;
2293 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2294 gcc_assert (index >= 0);
2295 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2296 call, &jfunc))
2297 return;
2299 else
2301 struct ipa_jump_func jfunc;
2302 gimple stmt = SSA_NAME_DEF_STMT (obj);
2303 tree expr;
2305 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2306 if (!expr)
2307 return;
2308 index = ipa_get_param_decl_index (info,
2309 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2310 gcc_assert (index >= 0);
2311 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2312 call, &jfunc, anc_offset))
2313 return;
2316 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2317 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2318 ii->offset = anc_offset;
2319 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2320 ii->otr_type = obj_type_ref_class (target);
2321 ii->polymorphic = 1;
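/* A sketch of the situation handled above (source hypothetical):

     void caller (struct A *a)
     {
       a->foo ();
     }

   The call is represented by an OBJ_TYPE_REF whose object is the formal
   parameter a, so an indirect call note carrying the corresponding token,
   offset and otr_type is attached to the edge.  */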
2324 /* Analyze a call statement CALL for whether and how it utilizes formal
2325 parameters of the caller FBI->node (described by FBI->info). */
2328 static void
2329 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2331 tree target = gimple_call_fn (call);
2333 if (!target
2334 || (TREE_CODE (target) != SSA_NAME
2335 && !virtual_method_call_p (target)))
2336 return;
2338 struct cgraph_edge *cs = fbi->node->get_edge (call);
2339 /* If we previously turned the call into a direct call, there is
2340 no need to analyze. */
2341 if (cs && !cs->indirect_unknown_callee)
2342 return;
2344 if (cs->indirect_info->polymorphic)
2346 tree instance;
2347 tree target = gimple_call_fn (call);
2348 ipa_polymorphic_call_context context (current_function_decl,
2349 target, call, &instance);
2351 gcc_checking_assert (cs->indirect_info->otr_type
2352 == obj_type_ref_class (target));
2353 gcc_checking_assert (cs->indirect_info->otr_token
2354 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2356 if (context.get_dynamic_type (instance,
2357 OBJ_TYPE_REF_OBJECT (target),
2358 obj_type_ref_class (target), call))
2359 cs->indirect_info->context = context;
2362 if (TREE_CODE (target) == SSA_NAME)
2363 ipa_analyze_indirect_call_uses (fbi, call, target);
2364 else if (virtual_method_call_p (target))
2365 ipa_analyze_virtual_call_uses (fbi, call, target);
2369 /* Analyze the call statement STMT with respect to formal parameters (described
2370 in FBI->info) of the caller given by FBI->node. Currently it only checks
2371 whether formal parameters are called. */
2373 static void
2374 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2376 if (is_gimple_call (stmt))
2377 ipa_analyze_call_uses (fbi, stmt);
2380 /* Callback of walk_stmt_load_store_addr_ops for visit_load, visit_store and
2381 visit_addr. If OP is a parameter declaration, mark it as used in the info
2382 structure passed in DATA. */
2384 static bool
2385 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2387 struct ipa_node_params *info = (struct ipa_node_params *) data;
2389 op = get_base_address (op);
2390 if (op
2391 && TREE_CODE (op) == PARM_DECL)
2393 int index = ipa_get_param_decl_index (info, op);
2394 gcc_assert (index >= 0);
2395 ipa_set_param_used (info, index, true);
2398 return false;
2401 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2402 the findings in various structures of the associated ipa_node_params
2403 structure, such as parameter flags, notes etc. FBI holds various data about
2404 the function being analyzed. */
2406 static void
2407 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2409 gimple_stmt_iterator gsi;
2410 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2412 gimple stmt = gsi_stmt (gsi);
2414 if (is_gimple_debug (stmt))
2415 continue;
2417 ipa_analyze_stmt_uses (fbi, stmt);
2418 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2419 visit_ref_for_mod_analysis,
2420 visit_ref_for_mod_analysis,
2421 visit_ref_for_mod_analysis);
2423 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2424 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2425 visit_ref_for_mod_analysis,
2426 visit_ref_for_mod_analysis,
2427 visit_ref_for_mod_analysis);
2430 /* Calculate controlled uses of parameters of NODE. */
2432 static void
2433 ipa_analyze_controlled_uses (struct cgraph_node *node)
2435 struct ipa_node_params *info = IPA_NODE_REF (node);
2437 for (int i = 0; i < ipa_get_param_count (info); i++)
2439 tree parm = ipa_get_param (info, i);
2440 int controlled_uses = 0;
2442 /* For SSA regs see if parameter is used. For non-SSA we compute
2443 the flag during modification analysis. */
2444 if (is_gimple_reg (parm))
2446 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2447 parm);
2448 if (ddef && !has_zero_uses (ddef))
2450 imm_use_iterator imm_iter;
2451 use_operand_p use_p;
2453 ipa_set_param_used (info, i, true);
2454 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2455 if (!is_gimple_call (USE_STMT (use_p)))
2457 if (!is_gimple_debug (USE_STMT (use_p)))
2459 controlled_uses = IPA_UNDESCRIBED_USE;
2460 break;
2463 else
2464 controlled_uses++;
2466 else
2467 controlled_uses = 0;
2469 else
2470 controlled_uses = IPA_UNDESCRIBED_USE;
2471 ipa_set_controlled_uses (info, i, controlled_uses);
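/* For illustration: if the only uses of a pointer parameter are passing it
   as an actual argument of calls, each such use is described by a jump
   function and their number is recorded above as the controlled-uses count;
   any other use (a store, a comparison, ...) forces the conservative
   IPA_UNDESCRIBED_USE instead.  */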
2475 /* Release the vectors in BI. */
2477 static void
2478 free_ipa_bb_info (struct ipa_bb_info *bi)
2480 bi->cg_edges.release ();
2481 bi->param_aa_statuses.release ();
2484 /* Dominator walker driving the analysis. */
2486 class analysis_dom_walker : public dom_walker
2488 public:
2489 analysis_dom_walker (struct func_body_info *fbi)
2490 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2492 virtual void before_dom_children (basic_block);
2494 private:
2495 struct func_body_info *m_fbi;
2498 void
2499 analysis_dom_walker::before_dom_children (basic_block bb)
2501 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2502 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2505 /* Initialize the array describing properties of formal parameters
2506 of NODE, analyze their uses and compute jump functions associated
2507 with actual arguments of calls from within NODE. */
2509 void
2510 ipa_analyze_node (struct cgraph_node *node)
2512 struct func_body_info fbi;
2513 struct ipa_node_params *info;
2515 ipa_check_create_node_params ();
2516 ipa_check_create_edge_args ();
2517 info = IPA_NODE_REF (node);
2519 if (info->analysis_done)
2520 return;
2521 info->analysis_done = 1;
2523 if (ipa_func_spec_opts_forbid_analysis_p (node))
2525 for (int i = 0; i < ipa_get_param_count (info); i++)
2527 ipa_set_param_used (info, i, true);
2528 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2530 return;
2533 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2534 push_cfun (func);
2535 calculate_dominance_info (CDI_DOMINATORS);
2536 ipa_initialize_node_params (node);
2537 ipa_analyze_controlled_uses (node);
2539 fbi.node = node;
2540 fbi.info = IPA_NODE_REF (node);
2541 fbi.bb_infos = vNULL;
2542 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2543 fbi.param_count = ipa_get_param_count (info);
2544 fbi.aa_walked = 0;
2546 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2548 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2549 bi->cg_edges.safe_push (cs);
2552 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2554 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2555 bi->cg_edges.safe_push (cs);
2558 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2560 int i;
2561 struct ipa_bb_info *bi;
2562 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2563 free_ipa_bb_info (bi);
2564 fbi.bb_infos.release ();
2565 free_dominance_info (CDI_DOMINATORS);
2566 pop_cfun ();
2569 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2570 attempt a type-based devirtualization. If successful, return the
2571 target function declaration, otherwise return NULL. */
2573 tree
2574 ipa_intraprocedural_devirtualization (gimple call)
2576 tree binfo, token, fndecl;
2577 struct ipa_jump_func jfunc;
2578 tree otr = gimple_call_fn (call);
2580 jfunc.type = IPA_JF_UNKNOWN;
2581 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2582 call, obj_type_ref_class (otr));
2583 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2584 return NULL_TREE;
2585 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2586 if (!binfo)
2587 return NULL_TREE;
2588 token = OBJ_TYPE_REF_TOKEN (otr);
2589 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2590 binfo);
2591 #ifdef ENABLE_CHECKING
2592 if (fndecl)
2593 gcc_assert (possible_polymorphic_call_target_p
2594 (otr, call, cgraph_node::get (fndecl)));
2595 #endif
2596 return fndecl;
2599 /* Update the jump function DST when the call graph edge corresponding to SRC
2600 is being inlined, knowing that DST is of type ancestor and SRC of known
2601 type. */
2603 static void
2604 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2605 struct ipa_jump_func *dst)
2607 HOST_WIDE_INT combined_offset;
2608 tree combined_type;
2610 if (!ipa_get_jf_ancestor_type_preserved (dst))
2612 dst->type = IPA_JF_UNKNOWN;
2613 return;
2616 combined_offset = ipa_get_jf_known_type_offset (src)
2617 + ipa_get_jf_ancestor_offset (dst);
2618 combined_type = ipa_get_jf_ancestor_type (dst);
2620 ipa_set_jf_known_type (dst, combined_offset,
2621 ipa_get_jf_known_type_base_type (src),
2622 combined_type);
2625 /* Update the jump functions associated with call graph edge E when the call
2626 graph edge CS is being inlined, assuming that E->caller is already (possibly
2627 indirectly) inlined into CS->callee and that E has not been inlined. */
2629 static void
2630 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2631 struct cgraph_edge *e)
2633 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2634 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2635 int count = ipa_get_cs_argument_count (args);
2636 int i;
2638 for (i = 0; i < count; i++)
2640 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2642 if (dst->type == IPA_JF_ANCESTOR)
2644 struct ipa_jump_func *src;
2645 int dst_fid = dst->value.ancestor.formal_id;
2647 /* Variable number of arguments can cause havoc if we try to access
2648 one that does not exist in the inlined edge. So make sure we
2649 don't. */
2650 if (dst_fid >= ipa_get_cs_argument_count (top))
2652 dst->type = IPA_JF_UNKNOWN;
2653 continue;
2656 src = ipa_get_ith_jump_func (top, dst_fid);
2658 if (src->agg.items
2659 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2661 struct ipa_agg_jf_item *item;
2662 int j;
2664 /* Currently we do not produce clobber aggregate jump functions,
2665 replace with merging when we do. */
2666 gcc_assert (!dst->agg.items);
2668 dst->agg.items = vec_safe_copy (src->agg.items);
2669 dst->agg.by_ref = src->agg.by_ref;
2670 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2671 item->offset -= dst->value.ancestor.offset;
2674 if (src->type == IPA_JF_KNOWN_TYPE)
2675 combine_known_type_and_ancestor_jfs (src, dst);
2676 else if (src->type == IPA_JF_PASS_THROUGH
2677 && src->value.pass_through.operation == NOP_EXPR)
2679 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2680 dst->value.ancestor.agg_preserved &=
2681 src->value.pass_through.agg_preserved;
2682 dst->value.ancestor.type_preserved &=
2683 src->value.pass_through.type_preserved;
2685 else if (src->type == IPA_JF_ANCESTOR)
2687 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2688 dst->value.ancestor.offset += src->value.ancestor.offset;
2689 dst->value.ancestor.agg_preserved &=
2690 src->value.ancestor.agg_preserved;
2691 dst->value.ancestor.type_preserved &=
2692 src->value.ancestor.type_preserved;
2694 else
2695 dst->type = IPA_JF_UNKNOWN;
2697 else if (dst->type == IPA_JF_PASS_THROUGH)
2699 struct ipa_jump_func *src;
2700 /* We must check range due to calls with variable number of arguments
2701 and we cannot combine jump functions with operations. */
2702 if (dst->value.pass_through.operation == NOP_EXPR
2703 && (dst->value.pass_through.formal_id
2704 < ipa_get_cs_argument_count (top)))
2706 int dst_fid = dst->value.pass_through.formal_id;
2707 src = ipa_get_ith_jump_func (top, dst_fid);
2708 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2710 switch (src->type)
2712 case IPA_JF_UNKNOWN:
2713 dst->type = IPA_JF_UNKNOWN;
2714 break;
2715 case IPA_JF_KNOWN_TYPE:
2716 if (ipa_get_jf_pass_through_type_preserved (dst))
2717 ipa_set_jf_known_type (dst,
2718 ipa_get_jf_known_type_offset (src),
2719 ipa_get_jf_known_type_base_type (src),
2720 ipa_get_jf_known_type_component_type (src));
2721 else
2722 dst->type = IPA_JF_UNKNOWN;
2723 break;
2724 case IPA_JF_CONST:
2725 ipa_set_jf_cst_copy (dst, src);
2726 break;
2728 case IPA_JF_PASS_THROUGH:
2730 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2731 enum tree_code operation;
2732 operation = ipa_get_jf_pass_through_operation (src);
2734 if (operation == NOP_EXPR)
2736 bool agg_p, type_p;
2737 agg_p = dst_agg_p
2738 && ipa_get_jf_pass_through_agg_preserved (src);
2739 type_p = ipa_get_jf_pass_through_type_preserved (src)
2740 && ipa_get_jf_pass_through_type_preserved (dst);
2741 ipa_set_jf_simple_pass_through (dst, formal_id,
2742 agg_p, type_p);
2744 else
2746 tree operand = ipa_get_jf_pass_through_operand (src);
2747 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2748 operation);
2750 break;
2752 case IPA_JF_ANCESTOR:
2754 bool agg_p, type_p;
2755 agg_p = dst_agg_p
2756 && ipa_get_jf_ancestor_agg_preserved (src);
2757 type_p = ipa_get_jf_ancestor_type_preserved (src)
2758 && ipa_get_jf_pass_through_type_preserved (dst);
2759 ipa_set_ancestor_jf (dst,
2760 ipa_get_jf_ancestor_offset (src),
2761 ipa_get_jf_ancestor_type (src),
2762 ipa_get_jf_ancestor_formal_id (src),
2763 agg_p, type_p);
2764 break;
2766 default:
2767 gcc_unreachable ();
2770 if (src->agg.items
2771 && (dst_agg_p || !src->agg.by_ref))
2773 /* Currently we do not produce clobber aggregate jump
2774 functions, replace with merging when we do. */
2775 gcc_assert (!dst->agg.items);
2777 dst->agg.by_ref = src->agg.by_ref;
2778 dst->agg.items = vec_safe_copy (src->agg.items);
2781 else
2782 dst->type = IPA_JF_UNKNOWN;
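/* A small composition example (hypothetical): if the jump function of E's
   argument is a simple pass-through of formal 0 and CS passes the constant
   5 in that position, the code above rewrites the jump function into the
   constant 5, so the information survives the inlining of CS.  */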
2787 /* If TARGET is an addr_expr of a function declaration, make it the destination
2788 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2790 struct cgraph_edge *
2791 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2793 struct cgraph_node *callee;
2794 struct inline_edge_summary *es = inline_edge_summary (ie);
2795 bool unreachable = false;
2797 if (TREE_CODE (target) == ADDR_EXPR)
2798 target = TREE_OPERAND (target, 0);
2799 if (TREE_CODE (target) != FUNCTION_DECL)
2801 target = canonicalize_constructor_val (target, NULL);
2802 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2804 if (ie->indirect_info->member_ptr)
2805 /* Member pointer call that goes through a VMT lookup. */
2806 return NULL;
2808 if (dump_enabled_p ())
2810 location_t loc = gimple_location_safe (ie->call_stmt);
2811 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2812 "discovered direct call to non-function in %s/%i, "
2813 "making it __builtin_unreachable\n",
2814 ie->caller->name (), ie->caller->order);
2817 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2818 callee = cgraph_node::get_create (target);
2819 unreachable = true;
2821 else
2822 callee = cgraph_node::get (target);
2824 else
2825 callee = cgraph_node::get (target);
2827 /* Because may-edges are not explicitly represented and the vtable may be
2828 external, we may create the first reference to the object in the unit. */
2829 if (!callee || callee->global.inlined_to)
2832 /* We had better make sure we can refer to it.
2833 In the case of static functions we are out of luck, since we have already
2834 removed the body. In the case of public functions we may or may
2835 not introduce the reference. */
2836 if (!canonicalize_constructor_val (target, NULL)
2837 || !TREE_PUBLIC (target))
2839 if (dump_file)
2840 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2841 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2842 xstrdup (ie->caller->name ()),
2843 ie->caller->order,
2844 xstrdup (ie->callee->name ()),
2845 ie->callee->order);
2846 return NULL;
2848 callee = cgraph_node::get_create (target);
2851 if (!dbg_cnt (devirt))
2852 return NULL;
2854 ipa_check_create_node_params ();
2856 /* We cannot make edges to inline clones. It is a bug if someone removed
2857 the cgraph node too early. */
2858 gcc_assert (!callee->global.inlined_to);
2860 if (dump_file && !unreachable)
2862 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2863 "(%s/%i -> %s/%i), for stmt ",
2864 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2865 xstrdup (ie->caller->name ()),
2866 ie->caller->order,
2867 xstrdup (callee->name ()),
2868 callee->order);
2869 if (ie->call_stmt)
2870 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2871 else
2872 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2874 if (dump_enabled_p ())
2876 location_t loc = gimple_location_safe (ie->call_stmt);
2878 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2879 "converting indirect call in %s to direct call to %s\n",
2880 ie->caller->name (), callee->name ());
2882 ie = ie->make_direct (callee);
2883 es = inline_edge_summary (ie);
2884 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2885 - eni_size_weights.call_cost);
2886 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2887 - eni_time_weights.call_cost);
2889 return ie;
2892 /* Retrieve a value from aggregate jump function AGG at the given OFFSET or
2893 return NULL if there is none. BY_REF specifies whether the value has to
2894 be passed by reference or by value. */
2896 tree
2897 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2898 HOST_WIDE_INT offset, bool by_ref)
2900 struct ipa_agg_jf_item *item;
2901 int i;
2903 if (by_ref != agg->by_ref)
2904 return NULL;
2906 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2907 if (item->offset == offset)
2909 /* Currently we do not have clobber values, return NULL for them once
2910 we do. */
2911 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2912 return item->value;
2914 return NULL;
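/* A minimal usage sketch (the offset and by_ref values are hypothetical):

     tree cst = ipa_find_agg_cst_for_param (&jfunc->agg, 32, true);

   returns the constant known to be stored 32 bits into the aggregate
   pointed to by the argument, or NULL if no such item was recorded.  */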
2917 /* Remove a reference to SYMBOL from the list of references of a node given by
2918 reference description RDESC. Return true if the reference has been
2919 successfully found and removed. */
2921 static bool
2922 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2924 struct ipa_ref *to_del;
2925 struct cgraph_edge *origin;
2927 origin = rdesc->cs;
2928 if (!origin)
2929 return false;
2930 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2931 origin->lto_stmt_uid);
2932 if (!to_del)
2933 return false;
2935 to_del->remove_reference ();
2936 if (dump_file)
2937 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2938 xstrdup (origin->caller->name ()),
2939 origin->caller->order, xstrdup (symbol->name ()));
2940 return true;
2943 /* If JFUNC has a reference description with refcount different from
2944 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2945 NULL. JFUNC must be a constant jump function. */
2947 static struct ipa_cst_ref_desc *
2948 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2950 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2951 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2952 return rdesc;
2953 else
2954 return NULL;
2957 /* If the value of constant jump function JFUNC is an address of a function
2958 declaration, return the associated call graph node. Otherwise return
2959 NULL. */
2961 static cgraph_node *
2962 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2964 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2965 tree cst = ipa_get_jf_constant (jfunc);
2966 if (TREE_CODE (cst) != ADDR_EXPR
2967 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2968 return NULL;
2970 return cgraph_node::get (TREE_OPERAND (cst, 0));
2974 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2975 refcount and if it hits zero, remove the reference to the symbol associated
2976 with JFUNC from the caller of the edge specified in the rdesc. Return false if
2977 either the symbol or the reference could not be found, otherwise return true. */
2979 static bool
2980 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2982 struct ipa_cst_ref_desc *rdesc;
2983 if (jfunc->type == IPA_JF_CONST
2984 && (rdesc = jfunc_rdesc_usable (jfunc))
2985 && --rdesc->refcount == 0)
2987 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2988 if (!symbol)
2989 return false;
2991 return remove_described_reference (symbol, rdesc);
2993 return true;
2996 /* Try to find a destination for indirect edge IE that corresponds to a simple
2997 call or a call of a member function pointer and where the destination is a
2998 pointer formal parameter described by jump function JFUNC. If it can be
2999 determined, return the newly direct edge, otherwise return NULL.
3000 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3002 static struct cgraph_edge *
3003 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3004 struct ipa_jump_func *jfunc,
3005 struct ipa_node_params *new_root_info)
3007 struct cgraph_edge *cs;
3008 tree target;
3009 bool agg_contents = ie->indirect_info->agg_contents;
3011 if (ie->indirect_info->agg_contents)
3012 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3013 ie->indirect_info->offset,
3014 ie->indirect_info->by_ref);
3015 else
3016 target = ipa_value_from_jfunc (new_root_info, jfunc);
3017 if (!target)
3018 return NULL;
3019 cs = ipa_make_edge_direct_to_target (ie, target);
3021 if (cs && !agg_contents)
3023 bool ok;
3024 gcc_checking_assert (cs->callee
3025 && (cs != ie
3026 || jfunc->type != IPA_JF_CONST
3027 || !cgraph_node_for_jfunc (jfunc)
3028 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3029 ok = try_decrement_rdesc_refcount (jfunc);
3030 gcc_checking_assert (ok);
3033 return cs;
3036 /* Return the target to be used in cases of impossible devirtualization. IE
3037 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
3039 tree
3040 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3042 if (dump_file)
3044 if (target)
3045 fprintf (dump_file,
3046 "Type inconsistent devirtualization: %s/%i->%s\n",
3047 ie->caller->name (), ie->caller->order,
3048 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3049 else
3050 fprintf (dump_file,
3051 "No devirtualization target in %s/%i\n",
3052 ie->caller->name (), ie->caller->order);
3054 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3055 cgraph_node::get_create (new_target);
3056 return new_target;
3059 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3060 call based on a formal parameter which is described by jump function JFUNC
3061 and if it can be determined, make it direct and return the direct edge.
3062 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3063 are relative to. */
3065 static struct cgraph_edge *
3066 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3067 struct ipa_jump_func *jfunc,
3068 struct ipa_node_params *new_root_info)
3070 tree binfo, target;
3072 if (!flag_devirtualize)
3073 return NULL;
3075 /* First try to do lookup via known virtual table pointer value. */
3076 if (!ie->indirect_info->by_ref)
3078 tree vtable;
3079 unsigned HOST_WIDE_INT offset;
3080 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3081 ie->indirect_info->offset,
3082 true);
3083 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3085 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3086 vtable, offset);
3087 if (target)
3089 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3090 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3091 || !possible_polymorphic_call_target_p
3092 (ie, cgraph_node::get (target)))
3093 target = ipa_impossible_devirt_target (ie, target);
3094 return ipa_make_edge_direct_to_target (ie, target);
3099 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3101 if (!binfo)
3102 return NULL;
3104 if (TREE_CODE (binfo) != TREE_BINFO)
3106 ipa_polymorphic_call_context context (binfo,
3107 ie->indirect_info->otr_type,
3108 ie->indirect_info->offset);
3109 vec <cgraph_node *>targets;
3110 bool final;
3112 targets = possible_polymorphic_call_targets
3113 (ie->indirect_info->otr_type,
3114 ie->indirect_info->otr_token,
3115 context, &final);
3116 if (!final || targets.length () > 1)
3117 return NULL;
3118 if (targets.length () == 1)
3119 target = targets[0]->decl;
3120 else
3121 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3123 else
3125 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3126 ie->indirect_info->otr_type);
3127 if (binfo)
3128 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3129 binfo);
3130 else
3131 return NULL;
3134 if (target)
3136 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
3137 target = ipa_impossible_devirt_target (ie, target);
3138 return ipa_make_edge_direct_to_target (ie, target);
3140 else
3141 return NULL;
3144 /* Update the param called notes associated with NODE when CS is being inlined,
3145 assuming NODE is (potentially indirectly) inlined into CS->callee.
3146 Moreover, if the callee is discovered to be constant, create a new cgraph
3147 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3148 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3150 static bool
3151 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3152 struct cgraph_node *node,
3153 vec<cgraph_edge *> *new_edges)
3155 struct ipa_edge_args *top;
3156 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3157 struct ipa_node_params *new_root_info;
3158 bool res = false;
3160 ipa_check_create_edge_args ();
3161 top = IPA_EDGE_REF (cs);
3162 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3163 ? cs->caller->global.inlined_to
3164 : cs->caller);
3166 for (ie = node->indirect_calls; ie; ie = next_ie)
3168 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3169 struct ipa_jump_func *jfunc;
3170 int param_index;
3172 next_ie = ie->next_callee;
3174 if (ici->param_index == -1)
3175 continue;
3177 /* We must check range due to calls with variable number of arguments: */
3178 if (ici->param_index >= ipa_get_cs_argument_count (top))
3180 ici->param_index = -1;
3181 continue;
3184 param_index = ici->param_index;
3185 jfunc = ipa_get_ith_jump_func (top, param_index);
3187 if (!flag_indirect_inlining)
3188 new_direct_edge = NULL;
3189 else if (ici->polymorphic)
3190 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3191 new_root_info);
3192 else
3193 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3194 new_root_info);
3195 /* If speculation was removed, then we need to do nothing. */
3196 if (new_direct_edge && new_direct_edge != ie)
3198 new_direct_edge->indirect_inlining_edge = 1;
3199 top = IPA_EDGE_REF (cs);
3200 res = true;
3202 else if (new_direct_edge)
3204 new_direct_edge->indirect_inlining_edge = 1;
3205 if (new_direct_edge->call_stmt)
3206 new_direct_edge->call_stmt_cannot_inline_p
3207 = !gimple_check_call_matching_types (
3208 new_direct_edge->call_stmt,
3209 new_direct_edge->callee->decl, false);
3210 if (new_edges)
3212 new_edges->safe_push (new_direct_edge);
3213 res = true;
3215 top = IPA_EDGE_REF (cs);
3217 else if (jfunc->type == IPA_JF_PASS_THROUGH
3218 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3220 if ((ici->agg_contents
3221 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3222 || (ici->polymorphic
3223 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3224 ici->param_index = -1;
3225 else
3226 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3228 else if (jfunc->type == IPA_JF_ANCESTOR)
3230 if ((ici->agg_contents
3231 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3232 || (ici->polymorphic
3233 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3234 ici->param_index = -1;
3235 else
3237 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3238 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3241 else
3242 /* Either we can find a destination for this edge now or never. */
3243 ici->param_index = -1;
3246 return res;
3249 /* Recursively traverse subtree of NODE (including node) made of inlined
3250 cgraph_edges when CS has been inlined and invoke
3251 update_indirect_edges_after_inlining on all nodes and
3252 update_jump_functions_after_inlining on all non-inlined edges that lead out
3253 of this subtree. Newly discovered indirect edges will be added to
3254 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3255 created. */
3257 static bool
3258 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3259 struct cgraph_node *node,
3260 vec<cgraph_edge *> *new_edges)
3262 struct cgraph_edge *e;
3263 bool res;
3265 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3267 for (e = node->callees; e; e = e->next_callee)
3268 if (!e->inline_failed)
3269 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3270 else
3271 update_jump_functions_after_inlining (cs, e);
3272 for (e = node->indirect_calls; e; e = e->next_callee)
3273 update_jump_functions_after_inlining (cs, e);
3275 return res;
3278 /* Combine two controlled uses counts as done during inlining. */
3280 static int
3281 combine_controlled_uses_counters (int c, int d)
3283 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3284 return IPA_UNDESCRIBED_USE;
3285 else
3286 return c + d - 1;
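/* Worked example: with c == 2 described uses in the new root and d == 3
   uses in the inlined callee, the combined count is 2 + 3 - 1 == 4,
   because the call site being inlined was counted on both sides and no
   longer exists after inlining.  */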
3289 /* Propagate the number of controlled users from CS->callee to the new root
3290 of the tree of inlined nodes. */
3292 static void
3293 propagate_controlled_uses (struct cgraph_edge *cs)
3295 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3296 struct cgraph_node *new_root = cs->caller->global.inlined_to
3297 ? cs->caller->global.inlined_to : cs->caller;
3298 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3299 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3300 int count, i;
3302 count = MIN (ipa_get_cs_argument_count (args),
3303 ipa_get_param_count (old_root_info));
3304 for (i = 0; i < count; i++)
3306 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3307 struct ipa_cst_ref_desc *rdesc;
3309 if (jf->type == IPA_JF_PASS_THROUGH)
3311 int src_idx, c, d;
3312 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3313 c = ipa_get_controlled_uses (new_root_info, src_idx);
3314 d = ipa_get_controlled_uses (old_root_info, i);
3316 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3317 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3318 c = combine_controlled_uses_counters (c, d);
3319 ipa_set_controlled_uses (new_root_info, src_idx, c);
3320 if (c == 0 && new_root_info->ipcp_orig_node)
3322 struct cgraph_node *n;
3323 struct ipa_ref *ref;
3324 tree t = new_root_info->known_vals[src_idx];
3326 if (t && TREE_CODE (t) == ADDR_EXPR
3327 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3328 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3329 && (ref = new_root->find_reference (n, NULL, 0)))
3331 if (dump_file)
3332 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3333 "reference from %s/%i to %s/%i.\n",
3334 xstrdup (new_root->name ()),
3335 new_root->order,
3336 xstrdup (n->name ()), n->order);
3337 ref->remove_reference ();
3341 else if (jf->type == IPA_JF_CONST
3342 && (rdesc = jfunc_rdesc_usable (jf)))
3344 int d = ipa_get_controlled_uses (old_root_info, i);
3345 int c = rdesc->refcount;
3346 rdesc->refcount = combine_controlled_uses_counters (c, d);
3347 if (rdesc->refcount == 0)
3349 tree cst = ipa_get_jf_constant (jf);
3350 struct cgraph_node *n;
3351 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3352 && TREE_CODE (TREE_OPERAND (cst, 0))
3353 == FUNCTION_DECL);
3354 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3355 if (n)
3357 struct cgraph_node *clone;
3358 bool ok;
3359 ok = remove_described_reference (n, rdesc);
3360 gcc_checking_assert (ok);
3362 clone = cs->caller;
3363 while (clone->global.inlined_to
3364 && clone != rdesc->cs->caller
3365 && IPA_NODE_REF (clone)->ipcp_orig_node)
3367 struct ipa_ref *ref;
3368 ref = clone->find_reference (n, NULL, 0);
3369 if (ref)
3371 if (dump_file)
3372 fprintf (dump_file, "ipa-prop: Removing "
3373 "cloning-created reference "
3374 "from %s/%i to %s/%i.\n",
3375 xstrdup (clone->name ()),
3376 clone->order,
3377 xstrdup (n->name ()),
3378 n->order);
3379 ref->remove_reference ();
3381 clone = clone->callers->caller;
3388 for (i = ipa_get_param_count (old_root_info);
3389 i < ipa_get_cs_argument_count (args);
3390 i++)
3392 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3394 if (jf->type == IPA_JF_CONST)
3396 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3397 if (rdesc)
3398 rdesc->refcount = IPA_UNDESCRIBED_USE;
3400 else if (jf->type == IPA_JF_PASS_THROUGH)
3401 ipa_set_controlled_uses (new_root_info,
3402 jf->value.pass_through.formal_id,
3403 IPA_UNDESCRIBED_USE);
3407 /* Update jump functions and call note functions on inlining the call site CS.
3408 CS is expected to lead to a node already cloned by
3409 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3410 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3411 created. */
3413 bool
3414 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3415 vec<cgraph_edge *> *new_edges)
3417 bool changed;
3418 /* Do nothing if the preparation phase has not been carried out yet
3419 (i.e. during early inlining). */
3420 if (!ipa_node_params_vector.exists ())
3421 return false;
3422 gcc_assert (ipa_edge_args_vector);
3424 propagate_controlled_uses (cs);
3425 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3427 return changed;
3430 /* Frees all dynamically allocated structures that the argument info points
3431 to. */
3433 void
3434 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3436 vec_free (args->jump_functions);
3437 memset (args, 0, sizeof (*args));
3440 /* Free all ipa_edge_args structures. */
3442 void
3443 ipa_free_all_edge_args (void)
3445 int i;
3446 struct ipa_edge_args *args;
3448 if (!ipa_edge_args_vector)
3449 return;
3451 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3452 ipa_free_edge_args_substructures (args);
3454 vec_free (ipa_edge_args_vector);
3457 /* Frees all dynamically allocated structures that the param info points
3458 to. */
3460 void
3461 ipa_free_node_params_substructures (struct ipa_node_params *info)
3463 info->descriptors.release ();
3464 free (info->lattices);
3465 /* Lattice values and their sources are deallocated with their allocation
3466 pool. */
3467 info->known_vals.release ();
3468 memset (info, 0, sizeof (*info));
3471 /* Free all ipa_node_params structures. */
3473 void
3474 ipa_free_all_node_params (void)
3476 int i;
3477 struct ipa_node_params *info;
3479 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3480 ipa_free_node_params_substructures (info);
3482 ipa_node_params_vector.release ();
3485 /* Set the aggregate replacements of NODE to be AGGVALS. */
3487 void
3488 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3489 struct ipa_agg_replacement_value *aggvals)
3491 if (vec_safe_length (ipa_node_agg_replacements)
3492 <= (unsigned) symtab->cgraph_max_uid)
3493 vec_safe_grow_cleared (ipa_node_agg_replacements,
3494 symtab->cgraph_max_uid + 1);
3496 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3499 /* Hook that is called by cgraph.c when an edge is removed. */
3501 static void
3502 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3504 struct ipa_edge_args *args;
3506 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3507 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3508 return;
3510 args = IPA_EDGE_REF (cs);
3511 if (args->jump_functions)
3513 struct ipa_jump_func *jf;
3514 int i;
3515 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3517 struct ipa_cst_ref_desc *rdesc;
3518 try_decrement_rdesc_refcount (jf);
3519 if (jf->type == IPA_JF_CONST
3520 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3521 && rdesc->cs == cs)
3522 rdesc->cs = NULL;
3526 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3529 /* Hook that is called by cgraph.c when a node is removed. */
3531 static void
3532 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3534 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3535 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3536 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3537 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3538 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3541 /* Hook that is called by cgraph.c when an edge is duplicated. */
3543 static void
3544 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3545 __attribute__((unused)) void *data)
3547 struct ipa_edge_args *old_args, *new_args;
3548 unsigned int i;
3550 ipa_check_create_edge_args ();
3552 old_args = IPA_EDGE_REF (src);
3553 new_args = IPA_EDGE_REF (dst);
3555 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3557 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3559 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3560 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3562 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3564 if (src_jf->type == IPA_JF_CONST)
3566 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3568 if (!src_rdesc)
3569 dst_jf->value.constant.rdesc = NULL;
3570 else if (src->caller == dst->caller)
3572 struct ipa_ref *ref;
3573 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3574 gcc_checking_assert (n);
3575 ref = src->caller->find_reference (n, src->call_stmt,
3576 src->lto_stmt_uid);
3577 gcc_checking_assert (ref);
3578 dst->caller->clone_reference (ref, ref->stmt);
3580 gcc_checking_assert (ipa_refdesc_pool);
3581 struct ipa_cst_ref_desc *dst_rdesc
3582 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3583 dst_rdesc->cs = dst;
3584 dst_rdesc->refcount = src_rdesc->refcount;
3585 dst_rdesc->next_duplicate = NULL;
3586 dst_jf->value.constant.rdesc = dst_rdesc;
3588 else if (src_rdesc->cs == src)
3590 struct ipa_cst_ref_desc *dst_rdesc;
3591 gcc_checking_assert (ipa_refdesc_pool);
3592 dst_rdesc
3593 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3594 dst_rdesc->cs = dst;
3595 dst_rdesc->refcount = src_rdesc->refcount;
3596 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3597 src_rdesc->next_duplicate = dst_rdesc;
3598 dst_jf->value.constant.rdesc = dst_rdesc;
3600 else
3602 struct ipa_cst_ref_desc *dst_rdesc;
3603 /* This can happen during inlining, when a JFUNC can refer to a
3604 reference taken in a function up in the tree of inline clones.
3605 We need to find the duplicate that refers to our tree of
3606 inline clones. */
3608 gcc_assert (dst->caller->global.inlined_to);
3609 for (dst_rdesc = src_rdesc->next_duplicate;
3610 dst_rdesc;
3611 dst_rdesc = dst_rdesc->next_duplicate)
3613 struct cgraph_node *top;
3614 top = dst_rdesc->cs->caller->global.inlined_to
3615 ? dst_rdesc->cs->caller->global.inlined_to
3616 : dst_rdesc->cs->caller;
3617 if (dst->caller->global.inlined_to == top)
3618 break;
3620 gcc_assert (dst_rdesc);
3621 dst_jf->value.constant.rdesc = dst_rdesc;
3624 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3625 && src->caller == dst->caller)
3627 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3628 ? dst->caller->global.inlined_to : dst->caller;
3629 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3630 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3632 int c = ipa_get_controlled_uses (root_info, idx);
3633 if (c != IPA_UNDESCRIBED_USE)
3635 c++;
3636 ipa_set_controlled_uses (root_info, idx, c);
3642 /* Hook that is called by cgraph.c when a node is duplicated. */
3644 static void
3645 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3646 ATTRIBUTE_UNUSED void *data)
3648 struct ipa_node_params *old_info, *new_info;
3649 struct ipa_agg_replacement_value *old_av, *new_av;
3651 ipa_check_create_node_params ();
3652 old_info = IPA_NODE_REF (src);
3653 new_info = IPA_NODE_REF (dst);
3655 new_info->descriptors = old_info->descriptors.copy ();
3656 new_info->lattices = NULL;
3657 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3659 new_info->analysis_done = old_info->analysis_done;
3660 new_info->node_enqueued = old_info->node_enqueued;
3662 old_av = ipa_get_agg_replacements_for_node (src);
3663 if (!old_av)
3664 return;
3666 new_av = NULL;
3667 while (old_av)
3669 struct ipa_agg_replacement_value *v;
3671 v = ggc_alloc<ipa_agg_replacement_value> ();
3672 memcpy (v, old_av, sizeof (*v));
3673 v->next = new_av;
3674 new_av = v;
3675 old_av = old_av->next;
3677 ipa_set_node_agg_value_chain (dst, new_av);
3681 /* Analyze a newly added function in the callgraph. */
3683 static void
3684 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3686 if (node->has_gimple_body_p ())
3687 ipa_analyze_node (node);
3690 /* Register our cgraph hooks if they are not already there. */
3692 void
3693 ipa_register_cgraph_hooks (void)
3695 if (!edge_removal_hook_holder)
3696 edge_removal_hook_holder =
3697 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3698 if (!node_removal_hook_holder)
3699 node_removal_hook_holder =
3700 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3701 if (!edge_duplication_hook_holder)
3702 edge_duplication_hook_holder =
3703 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3704 if (!node_duplication_hook_holder)
3705 node_duplication_hook_holder =
3706 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3707 function_insertion_hook_holder =
3708 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3711 /* Unregister our cgraph hooks. */
3713 static void
3714 ipa_unregister_cgraph_hooks (void)
3716 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3717 edge_removal_hook_holder = NULL;
3718 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3719 node_removal_hook_holder = NULL;
3720 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3721 edge_duplication_hook_holder = NULL;
3722 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3723 node_duplication_hook_holder = NULL;
3724 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3725 function_insertion_hook_holder = NULL;
3728 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3729 longer needed after ipa-cp. */
3731 void
3732 ipa_free_all_structures_after_ipa_cp (void)
3734 if (!optimize)
3736 ipa_free_all_edge_args ();
3737 ipa_free_all_node_params ();
3738 free_alloc_pool (ipcp_sources_pool);
3739 free_alloc_pool (ipcp_values_pool);
3740 free_alloc_pool (ipcp_agg_lattice_pool);
3741 ipa_unregister_cgraph_hooks ();
3742 if (ipa_refdesc_pool)
3743 free_alloc_pool (ipa_refdesc_pool);
3747 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3748 longer needed after indirect inlining. */
3750 void
3751 ipa_free_all_structures_after_iinln (void)
3753 ipa_free_all_edge_args ();
3754 ipa_free_all_node_params ();
3755 ipa_unregister_cgraph_hooks ();
3756 if (ipcp_sources_pool)
3757 free_alloc_pool (ipcp_sources_pool);
3758 if (ipcp_values_pool)
3759 free_alloc_pool (ipcp_values_pool);
3760 if (ipcp_agg_lattice_pool)
3761 free_alloc_pool (ipcp_agg_lattice_pool);
3762 if (ipa_refdesc_pool)
3763 free_alloc_pool (ipa_refdesc_pool);
3766 /* Print ipa_tree_map data structures of function NODE to F. */
3769 void
3770 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3772 int i, count;
3773 struct ipa_node_params *info;
3775 if (!node->definition)
3776 return;
3777 info = IPA_NODE_REF (node);
3778 fprintf (f, " function %s/%i parameter descriptors:\n",
3779 node->name (), node->order);
3780 count = ipa_get_param_count (info);
3781 for (i = 0; i < count; i++)
3783 int c;
3785 fprintf (f, " ");
3786 ipa_dump_param (f, info, i);
3787 if (ipa_is_param_used (info, i))
3788 fprintf (f, " used");
3789 c = ipa_get_controlled_uses (info, i);
3790 if (c == IPA_UNDESCRIBED_USE)
3791 fprintf (f, " undescribed_use");
3792 else
3793 fprintf (f, " controlled_uses=%i", c);
3794 fprintf (f, "\n");
3798 /* Print ipa_tree_map data structures of all functions in the
3799 callgraph to F. */
3801 void
3802 ipa_print_all_params (FILE * f)
3804 struct cgraph_node *node;
3806 fprintf (f, "\nFunction parameters:\n");
3807 FOR_EACH_FUNCTION (node)
3808 ipa_print_node_params (f, node);
3811 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3813 vec<tree>
3814 ipa_get_vector_of_formal_parms (tree fndecl)
3816 vec<tree> args;
3817 int count;
3818 tree parm;
3820 gcc_assert (!flag_wpa);
3821 count = count_formal_params (fndecl);
3822 args.create (count);
3823 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3824 args.quick_push (parm);
3826 return args;
3829 /* Return a heap-allocated vector containing the types of formal parameters
3830 of function type FNTYPE. */
3832 vec<tree>
3833 ipa_get_vector_of_formal_parm_types (tree fntype)
3835 vec<tree> types;
3836 int count = 0;
3837 tree t;
3839 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3840 count++;
3842 types.create (count);
3843 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3844 types.quick_push (TREE_VALUE (t));
3846 return types;
3849 /* Modify the function declaration FNDECL and its type according to the plan in
3850 ADJUSTMENTS. It also sets the base fields of individual adjustment
3851 structures to reflect the actual parameters being modified, which are
3852 determined by the base_index field. */
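/* An illustrative sketch of an ADJUSTMENTS vector (hypothetical, not the
   output of any particular pass): given the original declaration

     void foo (int unused, struct S *p);

   a two-element vector with

     adjustments[0]: op == IPA_PARM_OP_REMOVE, base_index == 0
     adjustments[1]: op == IPA_PARM_OP_COPY,   base_index == 1

   makes this function rewrite the declaration to "void foo (struct S *p)".  */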
3854 void
3855 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3857 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3858 tree orig_type = TREE_TYPE (fndecl);
3859 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3861 /* The following test is an ugly hack; some functions simply don't have any
3862 arguments in their type. This is probably a bug, but we need to cope with it. */
3863 bool care_for_types = (old_arg_types != NULL_TREE);
3864 bool last_parm_void;
3865 vec<tree> otypes;
3866 if (care_for_types)
3868 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3869 == void_type_node);
3870 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3871 if (last_parm_void)
3872 gcc_assert (oparms.length () + 1 == otypes.length ());
3873 else
3874 gcc_assert (oparms.length () == otypes.length ());
3876 else
3878 last_parm_void = false;
3879 otypes.create (0);
3882 int len = adjustments.length ();
3883 tree *link = &DECL_ARGUMENTS (fndecl);
3884 tree new_arg_types = NULL;
3885 for (int i = 0; i < len; i++)
3887 struct ipa_parm_adjustment *adj;
3888 gcc_assert (link);
3890 adj = &adjustments[i];
3891 tree parm;
3892 if (adj->op == IPA_PARM_OP_NEW)
3893 parm = NULL;
3894 else
3895 parm = oparms[adj->base_index];
3896 adj->base = parm;
3898 if (adj->op == IPA_PARM_OP_COPY)
3900 if (care_for_types)
3901 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3902 new_arg_types);
3903 *link = parm;
3904 link = &DECL_CHAIN (parm);
3906 else if (adj->op != IPA_PARM_OP_REMOVE)
3908 tree new_parm;
3909 tree ptype;
3911 if (adj->by_ref)
3912 ptype = build_pointer_type (adj->type);
3913 else
3915 ptype = adj->type;
3916 if (is_gimple_reg_type (ptype))
3918 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3919 if (TYPE_ALIGN (ptype) < malign)
3920 ptype = build_aligned_type (ptype, malign);
3924 if (care_for_types)
3925 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3927 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3928 ptype);
3929 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3930 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3931 DECL_ARTIFICIAL (new_parm) = 1;
3932 DECL_ARG_TYPE (new_parm) = ptype;
3933 DECL_CONTEXT (new_parm) = fndecl;
3934 TREE_USED (new_parm) = 1;
3935 DECL_IGNORED_P (new_parm) = 1;
3936 layout_decl (new_parm, 0);
3938 if (adj->op == IPA_PARM_OP_NEW)
3939 adj->base = NULL;
3940 else
3941 adj->base = parm;
3942 adj->new_decl = new_parm;
3944 *link = new_parm;
3945 link = &DECL_CHAIN (new_parm);
3949 *link = NULL_TREE;
3951 tree new_reversed = NULL;
3952 if (care_for_types)
3954 new_reversed = nreverse (new_arg_types);
3955 if (last_parm_void)
3957 if (new_reversed)
3958 TREE_CHAIN (new_arg_types) = void_list_node;
3959 else
3960 new_reversed = void_list_node;
3964 /* Use copy_node to preserve as much as possible from the original type
3965 (debug info, attribute lists etc.). The exception is METHOD_TYPEs, which
3966 must have the THIS argument; when we are asked to remove it, we need to
3967 build a new FUNCTION_TYPE instead. */
3969 tree new_type = NULL;
3970 if (TREE_CODE (orig_type) != METHOD_TYPE
3971 || (adjustments[0].op == IPA_PARM_OP_COPY
3972 && adjustments[0].base_index == 0))
3974 new_type = build_distinct_type_copy (orig_type);
3975 TYPE_ARG_TYPES (new_type) = new_reversed;
3977 else
3979 new_type
3980 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3981 new_reversed));
3982 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3983 DECL_VINDEX (fndecl) = NULL_TREE;
3986 /* When the signature changes, we need to clear the builtin info. */
3987 if (DECL_BUILT_IN (fndecl))
3989 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3990 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3993 /* This is a new type, not a copy of an old type. Need to reassociate
3994 variants. We can handle everything except the main variant lazily. */
3995 tree t = TYPE_MAIN_VARIANT (orig_type);
3996 if (orig_type != t)
3998 TYPE_MAIN_VARIANT (new_type) = t;
3999 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
4000 TYPE_NEXT_VARIANT (t) = new_type;
4002 else
4004 TYPE_MAIN_VARIANT (new_type) = new_type;
4005 TYPE_NEXT_VARIANT (new_type) = NULL;
4008 TREE_TYPE (fndecl) = new_type;
4009 DECL_VIRTUAL_P (fndecl) = 0;
4010 DECL_LANG_SPECIFIC (fndecl) = NULL;
4011 otypes.release ();
4012 oparms.release ();
4015 /* Modify the actual arguments of the function call STMT as indicated in
4016 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
4017 otherwise it must contain the corresponding call graph edge. */
4019 void
4020 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4021 ipa_parm_adjustment_vec adjustments)
4023 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4024 vec<tree> vargs;
4025 vec<tree, va_gc> **debug_args = NULL;
4026 gimple new_stmt;
4027 gimple_stmt_iterator gsi, prev_gsi;
4028 tree callee_decl;
4029 int i, len;
4031 len = adjustments.length ();
4032 vargs.create (len);
4033 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4034 current_node->remove_stmt_references (stmt);
4036 gsi = gsi_for_stmt (stmt);
4037 prev_gsi = gsi;
4038 gsi_prev (&prev_gsi);
4039 for (i = 0; i < len; i++)
4041 struct ipa_parm_adjustment *adj;
4043 adj = &adjustments[i];
4045 if (adj->op == IPA_PARM_OP_COPY)
4047 tree arg = gimple_call_arg (stmt, adj->base_index);
4049 vargs.quick_push (arg);
4051 else if (adj->op != IPA_PARM_OP_REMOVE)
4053 tree expr, base, off;
4054 location_t loc;
4055 unsigned int deref_align = 0;
4056 bool deref_base = false;
4058 /* We create a new parameter out of the value of the old one; we can
4059 do the following kinds of transformations:
4061 - A scalar passed by reference is converted to a scalar passed by
4062 value. (adj->by_ref is false and the type of the original
4063 actual argument is a pointer to a scalar).
4065 - A part of an aggregate is passed instead of the whole aggregate.
4066 The part can be passed either by value or by reference, this is
4067 determined by value of adj->by_ref. Moreover, the code below
4068 handles both situations when the original aggregate is passed by
4069 value (its type is not a pointer) and when it is passed by
4070 reference (it is a pointer to an aggregate).
4072 When the new argument is passed by reference (adj->by_ref is true)
4073 it must be a part of an aggregate and therefore we form it by
4074 simply taking the address of a reference inside the original
4075 aggregate. */
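/* An illustrative sketch with hypothetical values: for

     struct S { int x; float y; };
     void bar (struct S *p);

   where only p->y is ever read, an adjustment with by_ref == false,
   offset == 32 (assuming a 4-byte int) and type == float turns the call
   "bar (&s)" into "bar (s.y)"; the MEM_REF built below extracts the float
   out of the original aggregate argument.  */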
4077 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4078 base = gimple_call_arg (stmt, adj->base_index);
4079 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4080 : EXPR_LOCATION (base);
4082 if (TREE_CODE (base) != ADDR_EXPR
4083 && POINTER_TYPE_P (TREE_TYPE (base)))
4084 off = build_int_cst (adj->alias_ptr_type,
4085 adj->offset / BITS_PER_UNIT);
4086 else
4088 HOST_WIDE_INT base_offset;
4089 tree prev_base;
4090 bool addrof;
4092 if (TREE_CODE (base) == ADDR_EXPR)
4094 base = TREE_OPERAND (base, 0);
4095 addrof = true;
4097 else
4098 addrof = false;
4099 prev_base = base;
4100 base = get_addr_base_and_unit_offset (base, &base_offset);
4101 /* Aggregate arguments can have non-invariant addresses. */
4102 if (!base)
4104 base = build_fold_addr_expr (prev_base);
4105 off = build_int_cst (adj->alias_ptr_type,
4106 adj->offset / BITS_PER_UNIT);
4108 else if (TREE_CODE (base) == MEM_REF)
4110 if (!addrof)
4112 deref_base = true;
4113 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4115 off = build_int_cst (adj->alias_ptr_type,
4116 base_offset
4117 + adj->offset / BITS_PER_UNIT);
4118 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4119 off);
4120 base = TREE_OPERAND (base, 0);
4122 else
4124 off = build_int_cst (adj->alias_ptr_type,
4125 base_offset
4126 + adj->offset / BITS_PER_UNIT);
4127 base = build_fold_addr_expr (base);
4131 if (!adj->by_ref)
4133 tree type = adj->type;
4134 unsigned int align;
4135 unsigned HOST_WIDE_INT misalign;
4137 if (deref_base)
4139 align = deref_align;
4140 misalign = 0;
4142 else
4144 get_pointer_alignment_1 (base, &align, &misalign);
4145 if (TYPE_ALIGN (type) > align)
4146 align = TYPE_ALIGN (type);
4148 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4149 * BITS_PER_UNIT);
4150 misalign = misalign & (align - 1);
4151 if (misalign != 0)
4152 align = (misalign & -misalign);
4153 if (align < TYPE_ALIGN (type))
4154 type = build_aligned_type (type, align);
4155 base = force_gimple_operand_gsi (&gsi, base,
4156 true, NULL, true, GSI_SAME_STMT);
4157 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4158 /* If expr is not a valid gimple call argument emit
4159 a load into a temporary. */
4160 if (is_gimple_reg_type (TREE_TYPE (expr)))
4162 gimple tem = gimple_build_assign (NULL_TREE, expr);
4163 if (gimple_in_ssa_p (cfun))
4165 gimple_set_vuse (tem, gimple_vuse (stmt));
4166 expr = make_ssa_name (TREE_TYPE (expr), tem);
4168 else
4169 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4170 gimple_assign_set_lhs (tem, expr);
4171 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4174 else
4176 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4177 expr = build_fold_addr_expr (expr);
4178 expr = force_gimple_operand_gsi (&gsi, expr,
4179 true, NULL, true, GSI_SAME_STMT);
4181 vargs.quick_push (expr);
4183 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4185 unsigned int ix;
4186 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4187 gimple def_temp;
4189 arg = gimple_call_arg (stmt, adj->base_index);
4190 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4192 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4193 continue;
4194 arg = fold_convert_loc (gimple_location (stmt),
4195 TREE_TYPE (origin), arg);
4197 if (debug_args == NULL)
4198 debug_args = decl_debug_args_insert (callee_decl);
4199 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4200 if (ddecl == origin)
4202 ddecl = (**debug_args)[ix + 1];
4203 break;
4205 if (ddecl == NULL)
4207 ddecl = make_node (DEBUG_EXPR_DECL);
4208 DECL_ARTIFICIAL (ddecl) = 1;
4209 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4210 DECL_MODE (ddecl) = DECL_MODE (origin);
4212 vec_safe_push (*debug_args, origin);
4213 vec_safe_push (*debug_args, ddecl);
4215 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4216 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4220 if (dump_file && (dump_flags & TDF_DETAILS))
4222 fprintf (dump_file, "replacing stmt:");
4223 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4226 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4227 vargs.release ();
4228 if (gimple_call_lhs (stmt))
4229 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4231 gimple_set_block (new_stmt, gimple_block (stmt));
4232 if (gimple_has_location (stmt))
4233 gimple_set_location (new_stmt, gimple_location (stmt));
4234 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4235 gimple_call_copy_flags (new_stmt, stmt);
4236 if (gimple_in_ssa_p (cfun))
4238 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4239 if (gimple_vdef (stmt))
4241 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4242 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4246 if (dump_file && (dump_flags & TDF_DETAILS))
4248 fprintf (dump_file, "with stmt:");
4249 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4250 fprintf (dump_file, "\n");
4252 gsi_replace (&gsi, new_stmt, true);
4253 if (cs)
4254 cs->set_call_stmt (new_stmt);
4257 current_node->record_stmt_references (gsi_stmt (gsi));
4258 gsi_prev (&gsi);
4260 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4263 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4264 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies whether
4265 the function should care about type incompatibility between the current
4266 and new expressions. If it is false, the function will leave
4267 incompatibility issues to the caller. Return true iff the expression
4268 was modified. */
4270 bool
4271 ipa_modify_expr (tree *expr, bool convert,
4272 ipa_parm_adjustment_vec adjustments)
4274 struct ipa_parm_adjustment *cand
4275 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4276 if (!cand)
4277 return false;
4279 tree src;
4280 if (cand->by_ref)
4281 src = build_simple_mem_ref (cand->new_decl);
4282 else
4283 src = cand->new_decl;
4285 if (dump_file && (dump_flags & TDF_DETAILS))
4287 fprintf (dump_file, "About to replace expr ");
4288 print_generic_expr (dump_file, *expr, 0);
4289 fprintf (dump_file, " with ");
4290 print_generic_expr (dump_file, src, 0);
4291 fprintf (dump_file, "\n");
4294 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4296 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4297 *expr = vce;
4299 else
4300 *expr = src;
4301 return true;
4304 /* If T is an SSA_NAME, return NULL if it is not a default def or
4305 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4306 the base variable is always returned, regardless of whether it is a
4307 default def. Return T if it is not an SSA_NAME. */
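/* For example (the names here are only an illustration): given the SSA_NAME
   a_1(D), the default definition of parameter A, this returns the PARM_DECL
   A; for an SSA_NAME defined by a statement it returns NULL_TREE, unless
   IGNORE_DEFAULT_DEF is true.  */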
4309 static tree
4310 get_ssa_base_param (tree t, bool ignore_default_def)
4312 if (TREE_CODE (t) == SSA_NAME)
4314 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4315 return SSA_NAME_VAR (t);
4316 else
4317 return NULL_TREE;
4319 return t;
4322 /* Given an expression, return an adjustment entry specifying the
4323 transformation to be done on EXPR. If no suitable adjustment entry
4324 is found, return NULL.
4326 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4327 default def; otherwise bail on them.
4329 If CONVERT is non-NULL, this function will set *CONVERT if the
4330 expression provided is a component reference. ADJUSTMENTS is the
4331 adjustments vector. */
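/* A sketch of the typical IPA-SRA situation (names are hypothetical): if
   **EXPR is the load p_1(D)->f, where p is a PARM_DECL being split and an
   adjustment is recorded at the offset of field f, the code below strips
   the MEM_REF, maps the SSA name back to the PARM_DECL and returns that
   adjustment entry.  */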
4333 ipa_parm_adjustment *
4334 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4335 ipa_parm_adjustment_vec adjustments,
4336 bool ignore_default_def)
4338 if (TREE_CODE (**expr) == BIT_FIELD_REF
4339 || TREE_CODE (**expr) == IMAGPART_EXPR
4340 || TREE_CODE (**expr) == REALPART_EXPR)
4342 *expr = &TREE_OPERAND (**expr, 0);
4343 if (convert)
4344 *convert = true;
4347 HOST_WIDE_INT offset, size, max_size;
4348 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4349 if (!base || size == -1 || max_size == -1)
4350 return NULL;
4352 if (TREE_CODE (base) == MEM_REF)
4354 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4355 base = TREE_OPERAND (base, 0);
4358 base = get_ssa_base_param (base, ignore_default_def);
4359 if (!base || TREE_CODE (base) != PARM_DECL)
4360 return NULL;
4362 struct ipa_parm_adjustment *cand = NULL;
4363 unsigned int len = adjustments.length ();
4364 for (unsigned i = 0; i < len; i++)
4366 struct ipa_parm_adjustment *adj = &adjustments[i];
4368 if (adj->base == base
4369 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4371 cand = adj;
4372 break;
4376 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4377 return NULL;
4378 return cand;
4381 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4383 static bool
4384 index_in_adjustments_multiple_times_p (int base_index,
4385 ipa_parm_adjustment_vec adjustments)
4387 int i, len = adjustments.length ();
4388 bool one = false;
4390 for (i = 0; i < len; i++)
4392 struct ipa_parm_adjustment *adj;
4393 adj = &adjustments[i];
4395 if (adj->base_index == base_index)
4397 if (one)
4398 return true;
4399 else
4400 one = true;
4403 return false;
4407 /* Return adjustments that should have the same effect on function parameters
4408 and call arguments as if they were first changed according to adjustments in
4409 INNER and then by adjustments in OUTER. */
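/* An illustrative case: if INNER is { REMOVE (base_index 0),
   COPY (base_index 1) } and OUTER is { REMOVE (base_index 0) }, the
   combined vector contains two REMOVE entries, one derived from OUTER
   (mapped back through the surviving inner adjustment) and one re-emitted
   from INNER, so both original parameters end up removed.  */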
4411 ipa_parm_adjustment_vec
4412 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4413 ipa_parm_adjustment_vec outer)
4415 int i, outlen = outer.length ();
4416 int inlen = inner.length ();
4417 int removals = 0;
4418 ipa_parm_adjustment_vec adjustments, tmp;
4420 tmp.create (inlen);
4421 for (i = 0; i < inlen; i++)
4423 struct ipa_parm_adjustment *n;
4424 n = &inner[i];
4426 if (n->op == IPA_PARM_OP_REMOVE)
4427 removals++;
4428 else
4430 /* FIXME: Handling of new arguments is not implemented yet. */
4431 gcc_assert (n->op != IPA_PARM_OP_NEW);
4432 tmp.quick_push (*n);
4436 adjustments.create (outlen + removals);
4437 for (i = 0; i < outlen; i++)
4439 struct ipa_parm_adjustment r;
4440 struct ipa_parm_adjustment *out = &outer[i];
4441 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4443 memset (&r, 0, sizeof (r));
4444 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4445 if (out->op == IPA_PARM_OP_REMOVE)
4447 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4449 r.op = IPA_PARM_OP_REMOVE;
4450 adjustments.quick_push (r);
4452 continue;
4454 else
4456 /* FIXME: Handling of new arguments is not implemented yet. */
4457 gcc_assert (out->op != IPA_PARM_OP_NEW);
4460 r.base_index = in->base_index;
4461 r.type = out->type;
4463 /* FIXME: Create nonlocal value too. */
4465 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4466 r.op = IPA_PARM_OP_COPY;
4467 else if (in->op == IPA_PARM_OP_COPY)
4468 r.offset = out->offset;
4469 else if (out->op == IPA_PARM_OP_COPY)
4470 r.offset = in->offset;
4471 else
4472 r.offset = in->offset + out->offset;
4473 adjustments.quick_push (r);
4476 for (i = 0; i < inlen; i++)
4478 struct ipa_parm_adjustment *n = &inner[i];
4480 if (n->op == IPA_PARM_OP_REMOVE)
4481 adjustments.quick_push (*n);
4484 tmp.release ();
4485 return adjustments;
4488 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4489 human-friendly way, assuming they are meant to be applied to FNDECL. */
4491 void
4492 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4493 tree fndecl)
4495 int i, len = adjustments.length ();
4496 bool first = true;
4497 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4499 fprintf (file, "IPA param adjustments: ");
4500 for (i = 0; i < len; i++)
4502 struct ipa_parm_adjustment *adj;
4503 adj = &adjustments[i];
4505 if (!first)
4506 fprintf (file, " ");
4507 else
4508 first = false;
4510 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4511 print_generic_expr (file, parms[adj->base_index], 0);
4512 if (adj->base)
4514 fprintf (file, ", base: ");
4515 print_generic_expr (file, adj->base, 0);
4517 if (adj->new_decl)
4519 fprintf (file, ", new_decl: ");
4520 print_generic_expr (file, adj->new_decl, 0);
4522 if (adj->new_ssa_base)
4524 fprintf (file, ", new_ssa_base: ");
4525 print_generic_expr (file, adj->new_ssa_base, 0);
4528 if (adj->op == IPA_PARM_OP_COPY)
4529 fprintf (file, ", copy_param");
4530 else if (adj->op == IPA_PARM_OP_REMOVE)
4531 fprintf (file, ", remove_param");
4532 else
4533 fprintf (file, ", offset %li", (long) adj->offset);
4534 if (adj->by_ref)
4535 fprintf (file, ", by_ref");
4536 print_node_brief (file, ", type: ", adj->type, 0);
4537 fprintf (file, "\n");
4539 parms.release ();
4542 /* Dump the linked list of aggregate replacement values AV to F. */
4544 void
4545 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4547 bool comma = false;
4548 fprintf (f, " Aggregate replacements:");
4549 for (; av; av = av->next)
4551 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4552 av->index, av->offset);
4553 print_generic_expr (f, av->value, 0);
4554 comma = true;
4556 fprintf (f, "\n");
4559 /* Stream out jump function JUMP_FUNC to OB. */
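/* Note on the on-disk format, as derived from the code below: the jump
   function type is written first, followed by a type-specific payload, and
   finally the count of aggregate items and the items themselves. This must
   be kept in sync with ipa_read_jump_function below.  */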
4561 static void
4562 ipa_write_jump_function (struct output_block *ob,
4563 struct ipa_jump_func *jump_func)
4565 struct ipa_agg_jf_item *item;
4566 struct bitpack_d bp;
4567 int i, count;
4569 streamer_write_uhwi (ob, jump_func->type);
4570 switch (jump_func->type)
4572 case IPA_JF_UNKNOWN:
4573 break;
4574 case IPA_JF_KNOWN_TYPE:
4575 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4576 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4577 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4578 break;
4579 case IPA_JF_CONST:
4580 gcc_assert (
4581 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4582 stream_write_tree (ob, jump_func->value.constant.value, true);
4583 break;
4584 case IPA_JF_PASS_THROUGH:
4585 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4586 if (jump_func->value.pass_through.operation == NOP_EXPR)
4588 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4589 bp = bitpack_create (ob->main_stream);
4590 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4591 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4592 streamer_write_bitpack (&bp);
4594 else
4596 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4597 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4599 break;
4600 case IPA_JF_ANCESTOR:
4601 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4602 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4603 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4604 bp = bitpack_create (ob->main_stream);
4605 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4606 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4607 streamer_write_bitpack (&bp);
4608 break;
4611 count = vec_safe_length (jump_func->agg.items);
4612 streamer_write_uhwi (ob, count);
4613 if (count)
4615 bp = bitpack_create (ob->main_stream);
4616 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4617 streamer_write_bitpack (&bp);
4620 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4622 streamer_write_uhwi (ob, item->offset);
4623 stream_write_tree (ob, item->value, true);
4627 /* Read in jump function JUMP_FUNC from IB. */
4629 static void
4630 ipa_read_jump_function (struct lto_input_block *ib,
4631 struct ipa_jump_func *jump_func,
4632 struct cgraph_edge *cs,
4633 struct data_in *data_in)
4635 enum jump_func_type jftype;
4636 enum tree_code operation;
4637 int i, count;
4639 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4640 switch (jftype)
4642 case IPA_JF_UNKNOWN:
4643 jump_func->type = IPA_JF_UNKNOWN;
4644 break;
4645 case IPA_JF_KNOWN_TYPE:
4647 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4648 tree base_type = stream_read_tree (ib, data_in);
4649 tree component_type = stream_read_tree (ib, data_in);
4651 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4652 break;
4654 case IPA_JF_CONST:
4655 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4656 break;
4657 case IPA_JF_PASS_THROUGH:
4658 operation = (enum tree_code) streamer_read_uhwi (ib);
4659 if (operation == NOP_EXPR)
4661 int formal_id = streamer_read_uhwi (ib);
4662 struct bitpack_d bp = streamer_read_bitpack (ib);
4663 bool agg_preserved = bp_unpack_value (&bp, 1);
4664 bool type_preserved = bp_unpack_value (&bp, 1);
4665 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4666 type_preserved);
4668 else
4670 tree operand = stream_read_tree (ib, data_in);
4671 int formal_id = streamer_read_uhwi (ib);
4672 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4673 operation);
4675 break;
4676 case IPA_JF_ANCESTOR:
4678 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4679 tree type = stream_read_tree (ib, data_in);
4680 int formal_id = streamer_read_uhwi (ib);
4681 struct bitpack_d bp = streamer_read_bitpack (ib);
4682 bool agg_preserved = bp_unpack_value (&bp, 1);
4683 bool type_preserved = bp_unpack_value (&bp, 1);
4685 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4686 type_preserved);
4687 break;
4691 count = streamer_read_uhwi (ib);
4692 vec_alloc (jump_func->agg.items, count);
4693 if (count)
4695 struct bitpack_d bp = streamer_read_bitpack (ib);
4696 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4698 for (i = 0; i < count; i++)
4700 struct ipa_agg_jf_item item;
4701 item.offset = streamer_read_uhwi (ib);
4702 item.value = stream_read_tree (ib, data_in);
4703 jump_func->agg.items->quick_push (item);
4707 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4708 relevant to indirect inlining to OB. */
4710 static void
4711 ipa_write_indirect_edge_info (struct output_block *ob,
4712 struct cgraph_edge *cs)
4714 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4715 struct bitpack_d bp;
4717 streamer_write_hwi (ob, ii->param_index);
4718 bp = bitpack_create (ob->main_stream);
4719 bp_pack_value (&bp, ii->polymorphic, 1);
4720 bp_pack_value (&bp, ii->agg_contents, 1);
4721 bp_pack_value (&bp, ii->member_ptr, 1);
4722 bp_pack_value (&bp, ii->by_ref, 1);
4723 streamer_write_bitpack (&bp);
4724 if (ii->agg_contents || ii->polymorphic)
4725 streamer_write_hwi (ob, ii->offset);
4726 else
4727 gcc_assert (ii->offset == 0);
4729 if (ii->polymorphic)
4731 streamer_write_hwi (ob, ii->otr_token);
4732 stream_write_tree (ob, ii->otr_type, true);
4733 ii->context.stream_out (ob);
4737 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4738 relevant to indirect inlining from IB. */
4740 static void
4741 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4742 struct data_in *data_in,
4743 struct cgraph_edge *cs)
4745 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4746 struct bitpack_d bp;
4748 ii->param_index = (int) streamer_read_hwi (ib);
4749 bp = streamer_read_bitpack (ib);
4750 ii->polymorphic = bp_unpack_value (&bp, 1);
4751 ii->agg_contents = bp_unpack_value (&bp, 1);
4752 ii->member_ptr = bp_unpack_value (&bp, 1);
4753 ii->by_ref = bp_unpack_value (&bp, 1);
4754 if (ii->agg_contents || ii->polymorphic)
4755 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4756 else
4757 ii->offset = 0;
4758 if (ii->polymorphic)
4760 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4761 ii->otr_type = stream_read_tree (ib, data_in);
4762 ii->context.stream_in (ib, data_in);
4766 /* Stream out NODE info to OB. */
4768 static void
4769 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4771 int node_ref;
4772 lto_symtab_encoder_t encoder;
4773 struct ipa_node_params *info = IPA_NODE_REF (node);
4774 int j;
4775 struct cgraph_edge *e;
4776 struct bitpack_d bp;
4778 encoder = ob->decl_state->symtab_node_encoder;
4779 node_ref = lto_symtab_encoder_encode (encoder, node);
4780 streamer_write_uhwi (ob, node_ref);
4782 streamer_write_uhwi (ob, ipa_get_param_count (info));
4783 for (j = 0; j < ipa_get_param_count (info); j++)
4784 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4785 bp = bitpack_create (ob->main_stream);
4786 gcc_assert (info->analysis_done
4787 || ipa_get_param_count (info) == 0);
4788 gcc_assert (!info->node_enqueued);
4789 gcc_assert (!info->ipcp_orig_node);
4790 for (j = 0; j < ipa_get_param_count (info); j++)
4791 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4792 streamer_write_bitpack (&bp);
4793 for (j = 0; j < ipa_get_param_count (info); j++)
4794 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4795 for (e = node->callees; e; e = e->next_callee)
4797 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4799 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4800 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4801 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4803 for (e = node->indirect_calls; e; e = e->next_callee)
4805 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4807 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4808 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4809 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4810 ipa_write_indirect_edge_info (ob, e);
4814 /* Stream in NODE info from IB. */
4816 static void
4817 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4818 struct data_in *data_in)
4820 struct ipa_node_params *info = IPA_NODE_REF (node);
4821 int k;
4822 struct cgraph_edge *e;
4823 struct bitpack_d bp;
4825 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4827 for (k = 0; k < ipa_get_param_count (info); k++)
4828 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4830 bp = streamer_read_bitpack (ib);
4831 if (ipa_get_param_count (info) != 0)
4832 info->analysis_done = true;
4833 info->node_enqueued = false;
4834 for (k = 0; k < ipa_get_param_count (info); k++)
4835 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4836 for (k = 0; k < ipa_get_param_count (info); k++)
4837 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4838 for (e = node->callees; e; e = e->next_callee)
4840 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4841 int count = streamer_read_uhwi (ib);
4843 if (!count)
4844 continue;
4845 vec_safe_grow_cleared (args->jump_functions, count);
4847 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4848 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4849 data_in);
4851 for (e = node->indirect_calls; e; e = e->next_callee)
4853 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4854 int count = streamer_read_uhwi (ib);
4856 if (count)
4858 vec_safe_grow_cleared (args->jump_functions, count);
4859 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4860 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4861 data_in);
4863 ipa_read_indirect_edge_info (ib, data_in, e);
4867 /* Write jump functions for all analyzed nodes in the current LTO partition. */
4869 void
4870 ipa_prop_write_jump_functions (void)
4872 struct cgraph_node *node;
4873 struct output_block *ob;
4874 unsigned int count = 0;
4875 lto_symtab_encoder_iterator lsei;
4876 lto_symtab_encoder_t encoder;
4879 if (!ipa_node_params_vector.exists ())
4880 return;
4882 ob = create_output_block (LTO_section_jump_functions);
4883 encoder = ob->decl_state->symtab_node_encoder;
4884 ob->symbol = NULL;
4885 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4886 lsei_next_function_in_partition (&lsei))
4888 node = lsei_cgraph_node (lsei);
4889 if (node->has_gimple_body_p ()
4890 && IPA_NODE_REF (node) != NULL)
4891 count++;
4894 streamer_write_uhwi (ob, count);
4896 /* Process all of the functions. */
4897 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4898 lsei_next_function_in_partition (&lsei))
4900 node = lsei_cgraph_node (lsei);
4901 if (node->has_gimple_body_p ()
4902 && IPA_NODE_REF (node) != NULL)
4903 ipa_write_node_info (ob, node);
4905 streamer_write_char_stream (ob->main_stream, 0);
4906 produce_asm (ob, NULL);
4907 destroy_output_block (ob);
4910 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4912 static void
4913 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4914 size_t len)
4916 const struct lto_function_header *header =
4917 (const struct lto_function_header *) data;
4918 const int cfg_offset = sizeof (struct lto_function_header);
4919 const int main_offset = cfg_offset + header->cfg_size;
4920 const int string_offset = main_offset + header->main_size;
4921 struct data_in *data_in;
4922 unsigned int i;
4923 unsigned int count;
4925 lto_input_block ib_main ((const char *) data + main_offset,
4926 header->main_size);
4928 data_in =
4929 lto_data_in_create (file_data, (const char *) data + string_offset,
4930 header->string_size, vNULL);
4931 count = streamer_read_uhwi (&ib_main);
4933 for (i = 0; i < count; i++)
4935 unsigned int index;
4936 struct cgraph_node *node;
4937 lto_symtab_encoder_t encoder;
4939 index = streamer_read_uhwi (&ib_main);
4940 encoder = file_data->symtab_node_encoder;
4941 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4942 index));
4943 gcc_assert (node->definition);
4944 ipa_read_node_info (&ib_main, node, data_in);
4946 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4947 len);
4948 lto_data_in_delete (data_in);
4951 /* Read ipcp jump functions. */
4953 void
4954 ipa_prop_read_jump_functions (void)
4956 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4957 struct lto_file_decl_data *file_data;
4958 unsigned int j = 0;
4960 ipa_check_create_node_params ();
4961 ipa_check_create_edge_args ();
4962 ipa_register_cgraph_hooks ();
4964 while ((file_data = file_data_vec[j++]))
4966 size_t len;
4967 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4969 if (data)
4970 ipa_prop_read_section (file_data, data, len);
4974 /* After merging units, we can get a mismatch in argument counts.
4975 Also, decl merging might have rendered parameter lists obsolete. */
4978 void
4979 ipa_update_after_lto_read (void)
4981 ipa_check_create_node_params ();
4982 ipa_check_create_edge_args ();
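/* Write the aggregate value replacement chain of NODE to OB. */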
4985 void
4986 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4988 int node_ref;
4989 unsigned int count = 0;
4990 lto_symtab_encoder_t encoder;
4991 struct ipa_agg_replacement_value *aggvals, *av;
4993 aggvals = ipa_get_agg_replacements_for_node (node);
4994 encoder = ob->decl_state->symtab_node_encoder;
4995 node_ref = lto_symtab_encoder_encode (encoder, node);
4996 streamer_write_uhwi (ob, node_ref);
4998 for (av = aggvals; av; av = av->next)
4999 count++;
5000 streamer_write_uhwi (ob, count);
5002 for (av = aggvals; av; av = av->next)
5004 struct bitpack_d bp;
5006 streamer_write_uhwi (ob, av->offset);
5007 streamer_write_uhwi (ob, av->index);
5008 stream_write_tree (ob, av->value, true);
5010 bp = bitpack_create (ob->main_stream);
5011 bp_pack_value (&bp, av->by_ref, 1);
5012 streamer_write_bitpack (&bp);
5016 /* Stream in the aggregate value replacement chain for NODE from IB. */
5018 static void
5019 read_agg_replacement_chain (struct lto_input_block *ib,
5020 struct cgraph_node *node,
5021 struct data_in *data_in)
5023 struct ipa_agg_replacement_value *aggvals = NULL;
5024 unsigned int count, i;
5026 count = streamer_read_uhwi (ib);
5027 for (i = 0; i < count; i++)
5029 struct ipa_agg_replacement_value *av;
5030 struct bitpack_d bp;
5032 av = ggc_alloc<ipa_agg_replacement_value> ();
5033 av->offset = streamer_read_uhwi (ib);
5034 av->index = streamer_read_uhwi (ib);
5035 av->value = stream_read_tree (ib, data_in);
5036 bp = streamer_read_bitpack (ib);
5037 av->by_ref = bp_unpack_value (&bp, 1);
5038 av->next = aggvals;
5039 aggvals = av;
5041 ipa_set_node_agg_value_chain (node, aggvals);
5044 /* Write the aggregate replacement chains of all nodes in the current LTO partition. */
5046 void
5047 ipa_prop_write_all_agg_replacement (void)
5049 struct cgraph_node *node;
5050 struct output_block *ob;
5051 unsigned int count = 0;
5052 lto_symtab_encoder_iterator lsei;
5053 lto_symtab_encoder_t encoder;
5055 if (!ipa_node_agg_replacements)
5056 return;
5058 ob = create_output_block (LTO_section_ipcp_transform);
5059 encoder = ob->decl_state->symtab_node_encoder;
5060 ob->symbol = NULL;
5061 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5062 lsei_next_function_in_partition (&lsei))
5064 node = lsei_cgraph_node (lsei);
5065 if (node->has_gimple_body_p ()
5066 && ipa_get_agg_replacements_for_node (node) != NULL)
5067 count++;
5070 streamer_write_uhwi (ob, count);
5072 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5073 lsei_next_function_in_partition (&lsei))
5075 node = lsei_cgraph_node (lsei);
5076 if (node->has_gimple_body_p ()
5077 && ipa_get_agg_replacements_for_node (node) != NULL)
5078 write_agg_replacement_chain (ob, node);
5080 streamer_write_char_stream (ob->main_stream, 0);
5081 produce_asm (ob, NULL);
5082 destroy_output_block (ob);
5085 /* Read replacements section in file FILE_DATA of length LEN with data
5086 DATA. */
5088 static void
5089 read_replacements_section (struct lto_file_decl_data *file_data,
5090 const char *data,
5091 size_t len)
5093 const struct lto_function_header *header =
5094 (const struct lto_function_header *) data;
5095 const int cfg_offset = sizeof (struct lto_function_header);
5096 const int main_offset = cfg_offset + header->cfg_size;
5097 const int string_offset = main_offset + header->main_size;
5098 struct data_in *data_in;
5099 unsigned int i;
5100 unsigned int count;
5102 lto_input_block ib_main ((const char *) data + main_offset,
5103 header->main_size);
5105 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5106 header->string_size, vNULL);
5107 count = streamer_read_uhwi (&ib_main);
5109 for (i = 0; i < count; i++)
5111 unsigned int index;
5112 struct cgraph_node *node;
5113 lto_symtab_encoder_t encoder;
5115 index = streamer_read_uhwi (&ib_main);
5116 encoder = file_data->symtab_node_encoder;
5117 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5118 index));
5119 gcc_assert (node->definition);
5120 read_agg_replacement_chain (&ib_main, node, data_in);
5122 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5123 len);
5124 lto_data_in_delete (data_in);
5127 /* Read IPA-CP aggregate replacements. */
5129 void
5130 ipa_prop_read_all_agg_replacement (void)
5132 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5133 struct lto_file_decl_data *file_data;
5134 unsigned int j = 0;
5136 while ((file_data = file_data_vec[j++]))
5138 size_t len;
5139 const char *data = lto_get_section_data (file_data,
5140 LTO_section_ipcp_transform,
5141 NULL, &len);
5142 if (data)
5143 read_replacements_section (file_data, data, len);
5147 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5148 NODE. */
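/* For example (an illustrative mapping): if NODE is a clone that skipped
   the original second parameter, i.e. combined_args_to_skip contains bit 1,
   the index mapping computed below is 0 -> 0, 1 -> -1 and 2 -> 1, so a
   replacement recorded for index 2 is re-indexed to 1.  */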
5150 static void
5151 adjust_agg_replacement_values (struct cgraph_node *node,
5152 struct ipa_agg_replacement_value *aggval)
5154 struct ipa_agg_replacement_value *v;
5155 int i, c = 0, d = 0, *adj;
5157 if (!node->clone.combined_args_to_skip)
5158 return;
5160 for (v = aggval; v; v = v->next)
5162 gcc_assert (v->index >= 0);
5163 if (c < v->index)
5164 c = v->index;
5166 c++;
5168 adj = XALLOCAVEC (int, c);
5169 for (i = 0; i < c; i++)
5170 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5172 adj[i] = -1;
5173 d++;
5175 else
5176 adj[i] = i - d;
5178 for (v = aggval; v; v = v->next)
5179 v->index = adj[v->index];
5182 /* Dominator walker driving the ipcp modification phase. */
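/* The walker substitutes known aggregate constant values for loads from
   parameters; see before_dom_children below. It is instantiated and walked
   from the entry block in ipcp_transform_function.  */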
5184 class ipcp_modif_dom_walker : public dom_walker
5186 public:
5187 ipcp_modif_dom_walker (struct func_body_info *fbi,
5188 vec<ipa_param_descriptor> descs,
5189 struct ipa_agg_replacement_value *av,
5190 bool *sc, bool *cc)
5191 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5192 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5194 virtual void before_dom_children (basic_block);
5196 private:
5197 struct func_body_info *m_fbi;
5198 vec<ipa_param_descriptor> m_descriptors;
5199 struct ipa_agg_replacement_value *m_aggval;
5200 bool *m_something_changed, *m_cfg_changed;
5203 void
5204 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5206 gimple_stmt_iterator gsi;
5207 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5209 struct ipa_agg_replacement_value *v;
5210 gimple stmt = gsi_stmt (gsi);
5211 tree rhs, val, t;
5212 HOST_WIDE_INT offset, size;
5213 int index;
5214 bool by_ref, vce;
5216 if (!gimple_assign_load_p (stmt))
5217 continue;
5218 rhs = gimple_assign_rhs1 (stmt);
5219 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5220 continue;
5222 vce = false;
5223 t = rhs;
5224 while (handled_component_p (t))
5226 /* V_C_E can do things like convert an array of integers to one
5227 bigger integer and similar things we do not handle below. */
5228 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5230 vce = true;
5231 break;
5233 t = TREE_OPERAND (t, 0);
5235 if (vce)
5236 continue;
5238 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5239 &offset, &size, &by_ref))
5240 continue;
5241 for (v = m_aggval; v; v = v->next)
5242 if (v->index == index
5243 && v->offset == offset)
5244 break;
5245 if (!v
5246 || v->by_ref != by_ref
5247 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5248 continue;
5250 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5251 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5253 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5254 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5255 else if (TYPE_SIZE (TREE_TYPE (rhs))
5256 == TYPE_SIZE (TREE_TYPE (v->value)))
5257 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5258 else
5260 if (dump_file)
5262 fprintf (dump_file, " const ");
5263 print_generic_expr (dump_file, v->value, 0);
5264 fprintf (dump_file, " can't be converted to type of ");
5265 print_generic_expr (dump_file, rhs, 0);
5266 fprintf (dump_file, "\n");
5268 continue;
5271 else
5272 val = v->value;
5274 if (dump_file && (dump_flags & TDF_DETAILS))
5276 fprintf (dump_file, "Modifying stmt:\n ");
5277 print_gimple_stmt (dump_file, stmt, 0, 0);
5279 gimple_assign_set_rhs_from_tree (&gsi, val);
5280 update_stmt (stmt);
5282 if (dump_file && (dump_flags & TDF_DETAILS))
5284 fprintf (dump_file, "into:\n ");
5285 print_gimple_stmt (dump_file, stmt, 0, 0);
5286 fprintf (dump_file, "\n");
5289 *m_something_changed = true;
5290 if (maybe_clean_eh_stmt (stmt)
5291 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5292 *m_cfg_changed = true;
5297 /* IPCP transformation phase doing propagation of aggregate values. */
5299 unsigned int
5300 ipcp_transform_function (struct cgraph_node *node)
5302 vec<ipa_param_descriptor> descriptors = vNULL;
5303 struct func_body_info fbi;
5304 struct ipa_agg_replacement_value *aggval;
5305 int param_count;
5306 bool cfg_changed = false, something_changed = false;
5308 gcc_checking_assert (cfun);
5309 gcc_checking_assert (current_function_decl);
5311 if (dump_file)
5312 fprintf (dump_file, "Modification phase of node %s/%i\n",
5313 node->name (), node->order);
5315 aggval = ipa_get_agg_replacements_for_node (node);
5316 if (!aggval)
5317 return 0;
5318 param_count = count_formal_params (node->decl);
5319 if (param_count == 0)
5320 return 0;
5321 adjust_agg_replacement_values (node, aggval);
5322 if (dump_file)
5323 ipa_dump_agg_replacement_values (dump_file, aggval);
5325 fbi.node = node;
5326 fbi.info = NULL;
5327 fbi.bb_infos = vNULL;
5328 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5329 fbi.param_count = param_count;
5330 fbi.aa_walked = 0;
5332 descriptors.safe_grow_cleared (param_count);
5333 ipa_populate_param_decls (node, descriptors);
5334 calculate_dominance_info (CDI_DOMINATORS);
5335 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5336 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5338 int i;
5339 struct ipa_bb_info *bi;
5340 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5341 free_ipa_bb_info (bi);
5342 fbi.bb_infos.release ();
5343 free_dominance_info (CDI_DOMINATORS);
5344 (*ipa_node_agg_replacements)[node->uid] = NULL;
5345 descriptors.release ();
5347 if (!something_changed)
5348 return 0;
5349 else if (cfg_changed)
5350 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5351 else
5352 return TODO_update_ssa_only_virtuals;