/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

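/* A minimal illustration of when the predicate above fires (a sketch,
   assuming per-function optimization levels set via the "optimize"
   attribute):

     __attribute__ ((optimize ("O0")))
     int
     keep_me_unoptimized (int x)
     {
       return x + 1;
     }

   Such a declaration gives the decl a DECL_FUNCTION_SPECIFIC_OPTIMIZATION
   node with x_optimize == 0, so the predicate returns true and IPA-CP
   leaves the function alone.  */
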
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
                                                             true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
              && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                        sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
          || !TYPE_BINFO (jfunc->value.known_type.component_type)
          || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
        return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

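/* A minimal C++ sketch of the constructor assumptions documented above
   (illustrative only; the vtable-store ordering described is that of typical
   Itanium C++ ABI code, which is an assumption of this note):

     struct A { virtual ~A (); };
     struct B : A { B (); virtual ~B (); };

     B::B () : A ()   // 1) constructors of ancestor sub-objects run first
     {
       // 2) the compiled body then stores the address point of B's vtable
       //    into this->_vptr (and into the vptrs of all ancestors)
       // 3) only afterwards does user-written constructor code run
     }

   A backwards walk that starts among the statements of section 3 can
   therefore ignore calls until it reaches the vtable pointer stores of
   section 2.  */
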
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of ANALYZED_OBJ and the type associated with the new table
   identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy way to tie the two together, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (BLOCK_ABSTRACT_ORIGIN (block)
                && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
              {
                tree fn = BLOCK_ABSTRACT_ORIGIN (block);

                if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
                  continue;
                if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
                    && (DECL_CXX_CONSTRUCTOR_P (fn)
                        || DECL_CXX_DESTRUCTOR_P (fn)))
                  return true;
              }
          return false;
        }
    }
  return true;
}

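/* An illustration of the THIS-pointer special-casing above (a hedged sketch,
   not from the original sources):

     struct A { virtual void f (); void g (); };

     void
     A::g ()            // plain method, neither ctor nor dtor
     {
       this->f ();      // param_type_may_change_p (A::g, this_N(D), call)
     }                  // returns false unless an inlined cdtor intervenes

   The THIS pointer of an ordinary method cannot legally change its dynamic
   type before the call, whereas inside A::A or A::~A (or in code inlined
   from them, which the BLOCK walk detects) the same query must return
   true.  */
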
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gimple call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
         is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

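/* An illustration of what the vdef walk above can detect (a hedged sketch;
   placement new is the standard way to legally change a dynamic type in
   place, and the example assumes B's constructor gets inlined so that its
   vtable pointer store is visible in the caller's gimple):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void
     g (A *a)
     {
       a->~A ();
       new (a) B;   // inlined ctor stores B's vtable pointer into *a
       a->f ();     // the walk from here finds that store, so
     }              // tci.known_current_type becomes B

   If different paths store different vtable pointers, or the walk reaches
   the function entry, the jump function is set to IPA_JF_UNKNOWN instead.  */
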
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

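/* The walk budget above comes from the ipa-max-aa-steps parameter, so it can
   be raised from the command line when the default proves too conservative,
   e.g. (illustrative invocation):

     gcc -O2 --param ipa-max-aa-steps=50000 file.c  */
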
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
                              gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
                            vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
                           int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
                                                          index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   FBI and DESCRIPTORS describe parameters of the current function (but the
   former can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   the offset within the aggregate and whether it is a load from a value passed
   by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
                          vec<ipa_param_descriptor> descriptors,
                          gimple stmt, tree op, int *index_p,
                          HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                          bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, only without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);
      }

   INFO is the structure describing individual parameters at different
   stages of IPA optimizations.  FBI holds the information that is only needed
   for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
                     && !detect_type_change (op1, base, TREE_TYPE (param_type),
                                             call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset,
                             type_p ? TREE_TYPE (param_type) : NULL, index,
                             parm_ref_data_pass_through_p (fbi, index,
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
                         index,
                         parm_ref_data_pass_through_p (fbi, index, call, parm),
                         type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
                                    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
         we may work out the actual type by walking memory writes.  */
      && (!is_global_var (base)
          && detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

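/* For reference, the layout this predicate matches is the Itanium C++ ABI
   representation of a pointer to member function (an assumption of this
   note), which behaves as if declared:

     struct __member_fn_ptr
     {
       void (*__pfn) ();   // method address, or vtable offset plus one
       ptrdiff_t __delta;  // adjustment added to the THIS pointer
     };

   i.e. a RECORD_TYPE with exactly two fields: a pointer to METHOD_TYPE
   followed by an integral delta.  */
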
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}

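/* A small worked example of the function above (offsets and sizes in bits;
   the list contents are hypothetical): with an existing list describing
   parts [0, 32) and [64, 96), asking for a place for

     lhs_offset 32, lhs_size 32  ->  returns the link between the two nodes;
     lhs_offset 0,  lhs_size 32  ->  sets *already_there, because the later
                                     store already recorded there overwrites
                                     the one now being examined;
     lhs_offset 48, lhs_size 32  ->  returns NULL, the partial overlap with
                                     [64, 96) cannot be represented.  */
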
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}

1672 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1673 in ARG is filled in with constant values. ARG can either be an aggregate
1674 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1675 aggregate. JFUNC is the jump function into which the constants are
1676 subsequently stored. */
1678 static void
1679 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1680 struct ipa_jump_func *jfunc)
1682 struct ipa_known_agg_contents_list *list = NULL;
1683 int item_count = 0, const_count = 0;
1684 HOST_WIDE_INT arg_offset, arg_size;
1685 gimple_stmt_iterator gsi;
1686 tree arg_base;
1687 bool check_ref, by_ref;
1688 ao_ref r;
1690 /* The function operates in three stages. First, we prepare check_ref, r,
1691 arg_base and arg_offset based on what is actually passed as an actual
1692 argument. */
1694 if (POINTER_TYPE_P (arg_type))
1696 by_ref = true;
1697 if (TREE_CODE (arg) == SSA_NAME)
1699 tree type_size;
1700 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1701 return;
1702 check_ref = true;
1703 arg_base = arg;
1704 arg_offset = 0;
1705 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1706 arg_size = tree_to_uhwi (type_size);
1707 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1709 else if (TREE_CODE (arg) == ADDR_EXPR)
1711 HOST_WIDE_INT arg_max_size;
1713 arg = TREE_OPERAND (arg, 0);
1714 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1715 &arg_max_size);
1716 if (arg_max_size == -1
1717 || arg_max_size != arg_size
1718 || arg_offset < 0)
1719 return;
1720 if (DECL_P (arg_base))
1722 check_ref = false;
1723 ao_ref_init (&r, arg_base);
1725 else
1726 return;
1728 else
1729 return;
1731 else
1733 HOST_WIDE_INT arg_max_size;
1735 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1737 by_ref = false;
1738 check_ref = false;
1739 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1740 &arg_max_size);
1741 if (arg_max_size == -1
1742 || arg_max_size != arg_size
1743 || arg_offset < 0)
1744 return;
1746 ao_ref_init (&r, arg);
1749 /* Second stage walks back the BB, looks at individual statements and as long
1750 as it is confident of how the statements affect contents of the
1751 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1752 describing it. */
1753 gsi = gsi_for_stmt (call);
1754 gsi_prev (&gsi);
1755 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1757 struct ipa_known_agg_contents_list *n, **p;
1758 gimple stmt = gsi_stmt (gsi);
1759 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1760 tree lhs, rhs, lhs_base;
1762 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1763 continue;
1764 if (!gimple_assign_single_p (stmt))
1765 break;
1767 lhs = gimple_assign_lhs (stmt);
1768 rhs = gimple_assign_rhs1 (stmt);
1769 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1770 || TREE_CODE (lhs) == BIT_FIELD_REF
1771 || contains_bitfld_component_ref_p (lhs))
1772 break;
1774 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1775 &lhs_max_size);
1776 if (lhs_max_size == -1
1777 || lhs_max_size != lhs_size)
1778 break;
1780 if (check_ref)
1782 if (TREE_CODE (lhs_base) != MEM_REF
1783 || TREE_OPERAND (lhs_base, 0) != arg_base
1784 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1785 break;
1787 else if (lhs_base != arg_base)
1789 if (DECL_P (lhs_base))
1790 continue;
1791 else
1792 break;
1795 bool already_there = false;
1796 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1797 &already_there);
1798 if (!p)
1799 break;
1800 if (already_there)
1801 continue;
1803 rhs = get_ssa_def_if_simple_copy (rhs);
1804 n = XALLOCA (struct ipa_known_agg_contents_list);
1805 n->size = lhs_size;
1806 n->offset = lhs_offset;
1807 if (is_gimple_ip_invariant (rhs))
1809 n->constant = rhs;
1810 const_count++;
1812 else
1813 n->constant = NULL_TREE;
1814 n->next = *p;
1815 *p = n;
1817 item_count++;
1818 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1819 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1820 break;
1823 /* The third stage just goes over the list and creates an appropriate vector
1824 of ipa_agg_jf_item structures out of it, of course only if there are
1825 any known constants to begin with. */
1827 if (const_count)
1829 jfunc->agg.by_ref = by_ref;
1830 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
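/* An illustrative sketch (not from the GCC sources; all names below are
   hypothetical) of the kind of code the three stages above recognize,
   assuming a 32-bit int:

     struct pair { int first; int second; };
     extern int consume (struct pair *);

     int
     caller (void)
     {
       struct pair s;
       s.first = 4;
       s.second = 8;
       return consume (&s);
     }

   Walking backwards from the call, both stores are found to cover the
   argument's memory with interprocedural invariants, so the resulting jump
   function carries the aggregate items {bit offset 0 -> 4, bit offset
   32 -> 8} with agg.by_ref set, because the aggregate is passed by
   reference.  */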
1834 static tree
1835 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1837 int n;
1838 tree type = (e->callee
1839 ? TREE_TYPE (e->callee->decl)
1840 : gimple_call_fntype (e->call_stmt));
1841 tree t = TYPE_ARG_TYPES (type);
1843 for (n = 0; n < i; n++)
1845 if (!t)
1846 break;
1847 t = TREE_CHAIN (t);
1849 if (t)
1850 return TREE_VALUE (t);
1851 if (!e->callee)
1852 return NULL;
1853 t = DECL_ARGUMENTS (e->callee->decl);
1854 for (n = 0; n < i; n++)
1856 if (!t)
1857 return NULL;
1858 t = TREE_CHAIN (t);
1860 if (t)
1861 return TREE_TYPE (t);
1862 return NULL;
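/* An illustrative note (the declaration is hypothetical): for a callee
   declared as

     void callee (int a, double b, char *c);

   ipa_get_callee_param_type (e, 1) walks one TREE_CHAIN link along the
   TYPE_ARG_TYPES list and returns the type node for double.  For an
   unprototyped (K&R) callee, TYPE_ARG_TYPES is NULL, so the function falls
   back to walking DECL_ARGUMENTS and returning the TREE_TYPE of the
   corresponding PARM_DECL, provided the callee declaration is known.  */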
1865 /* Compute jump functions for all arguments of callsite CS and insert the
1866 information in the jump_functions array in the ipa_edge_args corresponding
1867 to this callsite. */
1869 static void
1870 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1871 struct cgraph_edge *cs)
1873 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1874 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1875 gimple call = cs->call_stmt;
1876 int n, arg_num = gimple_call_num_args (call);
1878 if (arg_num == 0 || args->jump_functions)
1879 return;
1880 vec_safe_grow_cleared (args->jump_functions, arg_num);
1882 if (gimple_call_internal_p (call))
1883 return;
1884 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1885 return;
1887 for (n = 0; n < arg_num; n++)
1889 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1890 tree arg = gimple_call_arg (call, n);
1891 tree param_type = ipa_get_callee_param_type (cs, n);
1893 if (is_gimple_ip_invariant (arg))
1894 ipa_set_jf_constant (jfunc, arg, cs);
1895 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1896 && TREE_CODE (arg) == PARM_DECL)
1898 int index = ipa_get_param_decl_index (info, arg);
1900 gcc_assert (index >= 0);
1901 /* Aggregate passed by value, check for pass-through, otherwise we
1902 will attempt to fill in aggregate contents later in this
1903 for cycle. */
1904 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1906 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1907 continue;
1910 else if (TREE_CODE (arg) == SSA_NAME)
1912 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1914 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1915 if (index >= 0)
1917 bool agg_p, type_p;
1918 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1919 if (param_type && POINTER_TYPE_P (param_type))
1920 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1921 call, jfunc);
1922 else
1923 type_p = false;
1924 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1925 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1926 type_p);
1929 else
1931 gimple stmt = SSA_NAME_DEF_STMT (arg);
1932 if (is_gimple_assign (stmt))
1933 compute_complex_assign_jump_func (fbi, info, jfunc,
1934 call, stmt, arg, param_type);
1935 else if (gimple_code (stmt) == GIMPLE_PHI)
1936 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1937 call, stmt, param_type);
1940 else
1941 compute_known_type_jump_func (arg, jfunc, call,
1942 param_type
1943 && POINTER_TYPE_P (param_type)
1944 ? TREE_TYPE (param_type)
1945 : NULL);
1947 /* If ARG is a pointer, we cannot use its type to determine the type of the
1948 aggregate passed (because type conversions are ignored in gimple). Usually
1949 we can safely get the type from the function declaration, but for K&R
1950 prototypes or variadic functions we can try our luck with the type of the
1951 pointer passed. TODO: Since we look for actual initialization of the memory
1952 object, we might do better to work out the type from the memory stores we find. */
1953 if (!param_type)
1954 param_type = TREE_TYPE (arg);
1956 if ((jfunc->type != IPA_JF_PASS_THROUGH
1957 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1958 && (jfunc->type != IPA_JF_ANCESTOR
1959 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1960 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1961 || POINTER_TYPE_P (param_type)))
1962 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
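/* A hedged source-level sketch (hypothetical names) of how the loop above
   typically classifies arguments:

     extern void callee (int, int, int *);
     int global;

     void
     caller (int i)
     {
       callee (7, i, &global);
     }

   Both 7 and &global are interprocedural invariants and receive
   IPA_JF_CONST jump functions; i, being the default definition of an SSA
   name of a formal parameter, receives a simple IPA_JF_PASS_THROUGH.
   Passing the address of a member of a pointed-to object would instead
   usually be summarized as an IPA_JF_ANCESTOR jump function carrying the
   member's offset.  */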
1966 /* Compute jump functions for all edges - both direct and indirect - outgoing
1967 from BB. */
1969 static void
1970 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1972 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1973 int i;
1974 struct cgraph_edge *cs;
1976 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1978 struct cgraph_node *callee = cs->callee;
1980 if (callee)
1982 callee->ultimate_alias_target ();
1983 /* We do not need to bother analyzing calls to unknown functions
1984 unless they may become known during LTO/WHOPR. */
1985 if (!callee->definition && !flag_lto)
1986 continue;
1988 ipa_compute_jump_functions_for_edge (fbi, cs);
1992 /* If STMT looks like a statement loading a value from a member pointer formal
1993 parameter, return that parameter and store the offset of the field to
1994 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1995 might be clobbered). If USE_DELTA, then we look for a use of the delta
1996 field rather than the pfn. */
1998 static tree
1999 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2000 HOST_WIDE_INT *offset_p)
2002 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2004 if (!gimple_assign_single_p (stmt))
2005 return NULL_TREE;
2007 rhs = gimple_assign_rhs1 (stmt);
2008 if (TREE_CODE (rhs) == COMPONENT_REF)
2010 ref_field = TREE_OPERAND (rhs, 1);
2011 rhs = TREE_OPERAND (rhs, 0);
2013 else
2014 ref_field = NULL_TREE;
2015 if (TREE_CODE (rhs) != MEM_REF)
2016 return NULL_TREE;
2017 rec = TREE_OPERAND (rhs, 0);
2018 if (TREE_CODE (rec) != ADDR_EXPR)
2019 return NULL_TREE;
2020 rec = TREE_OPERAND (rec, 0);
2021 if (TREE_CODE (rec) != PARM_DECL
2022 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2023 return NULL_TREE;
2024 ref_offset = TREE_OPERAND (rhs, 1);
2026 if (use_delta)
2027 fld = delta_field;
2028 else
2029 fld = ptr_field;
2030 if (offset_p)
2031 *offset_p = int_bit_position (fld);
2033 if (ref_field)
2035 if (integer_nonzerop (ref_offset))
2036 return NULL_TREE;
2037 return ref_field == fld ? rec : NULL_TREE;
2039 else
2040 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2041 : NULL_TREE;
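/* For orientation, a sketch of the member-pointer lowering that
   type_like_member_ptr_p looks for (the usual Itanium C++ ABI layout on
   typical targets; the names are illustrative):

     struct ptrmemfunc
     {
       void *__pfn;
       long __delta;
     };

   __pfn holds the function address, or, for virtual functions, an
   odd-encoded vtable offset, and __delta is the adjustment added to the
   this pointer.  A load such as f.__pfn therefore matches the
   COMPONENT_REF and MEM_REF forms handled above and makes this function
   return the underlying PARM_DECL.  */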
2044 /* Returns true iff T is an SSA_NAME defined by a statement. */
2046 static bool
2047 ipa_is_ssa_with_stmt_def (tree t)
2049 if (TREE_CODE (t) == SSA_NAME
2050 && !SSA_NAME_IS_DEFAULT_DEF (t))
2051 return true;
2052 else
2053 return false;
2056 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2057 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2058 indirect call graph edge. */
2060 static struct cgraph_edge *
2061 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2063 struct cgraph_edge *cs;
2065 cs = node->get_edge (stmt);
2066 cs->indirect_info->param_index = param_index;
2067 cs->indirect_info->agg_contents = 0;
2068 cs->indirect_info->member_ptr = 0;
2069 return cs;
2072 /* Analyze the CALL and examine uses of formal parameters of the caller
2073 FBI->node (described by FBI->info); FBI also holds intermediate
2074 information about each formal parameter. Currently it checks
2075 whether the call calls a pointer that is a formal parameter and if so, the
2076 parameter is marked with the called flag and an indirect call graph edge
2077 describing the call is created. This is very simple for ordinary pointers
2078 represented in SSA but not-so-nice when it comes to member pointers. The
2079 ugly part of this function does nothing more than trying to match the
2080 pattern of such a call. An example of such a pattern is the gimple dump
2081 below; the call is on the last line:
2083 <bb 2>:
2084 f$__delta_5 = f.__delta;
2085 f$__pfn_24 = f.__pfn;
2087 or
2088 <bb 2>:
2089 f$__delta_5 = MEM[(struct *)&f];
2090 f$__pfn_24 = MEM[(struct *)&f + 4B];
2092 and a few lines below:
2094 <bb 5>
2095 D.2496_3 = (int) f$__pfn_24;
2096 D.2497_4 = D.2496_3 & 1;
2097 if (D.2497_4 != 0)
2098 goto <bb 3>;
2099 else
2100 goto <bb 4>;
2102 <bb 6>:
2103 D.2500_7 = (unsigned int) f$__delta_5;
2104 D.2501_8 = &S + D.2500_7;
2105 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2106 D.2503_10 = *D.2502_9;
2107 D.2504_12 = f$__pfn_24 + -1;
2108 D.2505_13 = (unsigned int) D.2504_12;
2109 D.2506_14 = D.2503_10 + D.2505_13;
2110 D.2507_15 = *D.2506_14;
2111 iftmp.11_16 = (String:: *) D.2507_15;
2113 <bb 7>:
2114 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2115 D.2500_19 = (unsigned int) f$__delta_5;
2116 D.2508_20 = &S + D.2500_19;
2117 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2119 Such patterns are results of simple calls to a member pointer:
2121 int doprinting (int (MyString::* f)(int) const)
2123 MyString S ("somestring");
2125 return (S.*f)(4);
2128 The function also looks for called pointers loaded from aggregates passed
2129 by value or by reference. */
2131 static void
2132 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2133 tree target)
2135 struct ipa_node_params *info = fbi->info;
2136 HOST_WIDE_INT offset;
2137 bool by_ref;
2139 if (SSA_NAME_IS_DEFAULT_DEF (target))
2141 tree var = SSA_NAME_VAR (target);
2142 int index = ipa_get_param_decl_index (info, var);
2143 if (index >= 0)
2144 ipa_note_param_call (fbi->node, index, call);
2145 return;
2148 int index;
2149 gimple def = SSA_NAME_DEF_STMT (target);
2150 if (gimple_assign_single_p (def)
2151 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2152 gimple_assign_rhs1 (def), &index, &offset,
2153 NULL, &by_ref))
2155 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2156 if (cs->indirect_info->offset != offset)
2157 cs->indirect_info->outer_type = NULL;
2158 cs->indirect_info->offset = offset;
2159 cs->indirect_info->agg_contents = 1;
2160 cs->indirect_info->by_ref = by_ref;
2161 return;
2164 /* Now we need to try to match the complex pattern of calling a member
2165 pointer. */
2166 if (gimple_code (def) != GIMPLE_PHI
2167 || gimple_phi_num_args (def) != 2
2168 || !POINTER_TYPE_P (TREE_TYPE (target))
2169 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2170 return;
2172 /* First, we need to check whether one of these is a load from a member
2173 pointer that is a parameter to this function. */
2174 tree n1 = PHI_ARG_DEF (def, 0);
2175 tree n2 = PHI_ARG_DEF (def, 1);
2176 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2177 return;
2178 gimple d1 = SSA_NAME_DEF_STMT (n1);
2179 gimple d2 = SSA_NAME_DEF_STMT (n2);
2181 tree rec;
2182 basic_block bb, virt_bb;
2183 basic_block join = gimple_bb (def);
2184 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2186 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2187 return;
2189 bb = EDGE_PRED (join, 0)->src;
2190 virt_bb = gimple_bb (d2);
2192 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2194 bb = EDGE_PRED (join, 1)->src;
2195 virt_bb = gimple_bb (d1);
2197 else
2198 return;
2200 /* Second, we need to check that the basic blocks are laid out in the way
2201 corresponding to the pattern. */
2203 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2204 || single_pred (virt_bb) != bb
2205 || single_succ (virt_bb) != join)
2206 return;
2208 /* Third, let's see that the branching is done depending on the least
2209 significant bit of the pfn. */
2211 gimple branch = last_stmt (bb);
2212 if (!branch || gimple_code (branch) != GIMPLE_COND)
2213 return;
2215 if ((gimple_cond_code (branch) != NE_EXPR
2216 && gimple_cond_code (branch) != EQ_EXPR)
2217 || !integer_zerop (gimple_cond_rhs (branch)))
2218 return;
2220 tree cond = gimple_cond_lhs (branch);
2221 if (!ipa_is_ssa_with_stmt_def (cond))
2222 return;
2224 def = SSA_NAME_DEF_STMT (cond);
2225 if (!is_gimple_assign (def)
2226 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2227 || !integer_onep (gimple_assign_rhs2 (def)))
2228 return;
2230 cond = gimple_assign_rhs1 (def);
2231 if (!ipa_is_ssa_with_stmt_def (cond))
2232 return;
2234 def = SSA_NAME_DEF_STMT (cond);
2236 if (is_gimple_assign (def)
2237 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2239 cond = gimple_assign_rhs1 (def);
2240 if (!ipa_is_ssa_with_stmt_def (cond))
2241 return;
2242 def = SSA_NAME_DEF_STMT (cond);
2245 tree rec2;
2246 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2247 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2248 == ptrmemfunc_vbit_in_delta),
2249 NULL);
2250 if (rec != rec2)
2251 return;
2253 index = ipa_get_param_decl_index (info, rec);
2254 if (index >= 0
2255 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2257 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2258 if (cs->indirect_info->offset != offset)
2259 cs->indirect_info->outer_type = NULL;
2260 cs->indirect_info->offset = offset;
2261 cs->indirect_info->agg_contents = 1;
2262 cs->indirect_info->member_ptr = 1;
2265 return;
2268 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2269 object referenced in the expression is a formal parameter of the caller
2270 FBI->node (described by FBI->info), create a call note for the
2271 statement. */
2273 static void
2274 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2275 gimple call, tree target)
2277 tree obj = OBJ_TYPE_REF_OBJECT (target);
2278 int index;
2279 HOST_WIDE_INT anc_offset;
2281 if (!flag_devirtualize)
2282 return;
2284 if (TREE_CODE (obj) != SSA_NAME)
2285 return;
2287 struct ipa_node_params *info = fbi->info;
2288 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2290 struct ipa_jump_func jfunc;
2291 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2292 return;
2294 anc_offset = 0;
2295 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2296 gcc_assert (index >= 0);
2297 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2298 call, &jfunc))
2299 return;
2301 else
2303 struct ipa_jump_func jfunc;
2304 gimple stmt = SSA_NAME_DEF_STMT (obj);
2305 tree expr;
2307 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2308 if (!expr)
2309 return;
2310 index = ipa_get_param_decl_index (info,
2311 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2314 call, &jfunc, anc_offset))
2315 return;
2318 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2319 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2320 ii->offset = anc_offset;
2321 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2322 ii->otr_type = obj_type_ref_class (target);
2323 ii->polymorphic = 1;
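/* A hedged sketch (hypothetical C++) of the simplest case handled above:

     struct Base { virtual int virt (); };

     int
     call_it (Base *b)
     {
       return b->virt ();
     }

   The OBJ_TYPE_REF object is the default definition of parameter b, so an
   indirect call note is created with param_index 0, offset 0, the OTR
   token of virt and the polymorphic flag set.  */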
2326 /* Analyze call statement CALL to determine whether and how it utilizes
2327 formal parameters of the caller FBI->node (described by FBI->info), which
2328 also holds intermediate information about each formal parameter. */
2330 static void
2331 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2333 tree target = gimple_call_fn (call);
2335 if (!target
2336 || (TREE_CODE (target) != SSA_NAME
2337 && !virtual_method_call_p (target)))
2338 return;
2340 /* If we previously turned the call into a direct call, there is
2341 no need to analyze. */
2342 struct cgraph_edge *cs = fbi->node->get_edge (call);
2343 if (cs && !cs->indirect_unknown_callee)
2344 return;
2345 if (TREE_CODE (target) == SSA_NAME)
2346 ipa_analyze_indirect_call_uses (fbi, call, target);
2347 else if (virtual_method_call_p (target))
2348 ipa_analyze_virtual_call_uses (fbi, call, target);
2352 /* Analyze the call statement STMT with respect to formal parameters
2353 (described in FBI->info) of the caller given by FBI->node. Currently it
2354 only checks whether formal parameters are called. */
2356 static void
2357 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2359 if (is_gimple_call (stmt))
2360 ipa_analyze_call_uses (fbi, stmt);
2363 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2364 address operands alike. If OP is a parameter declaration, mark it as used
2365 in the info structure passed in DATA. */
2367 static bool
2368 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2370 struct ipa_node_params *info = (struct ipa_node_params *) data;
2372 op = get_base_address (op);
2373 if (op
2374 && TREE_CODE (op) == PARM_DECL)
2376 int index = ipa_get_param_decl_index (info, op);
2377 gcc_assert (index >= 0);
2378 ipa_set_param_used (info, index, true);
2381 return false;
2384 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2385 the findings in various structures of the associated ipa_node_params
2386 structure, such as parameter flags, notes etc. FBI holds various data about
2387 the function being analyzed. */
2389 static void
2390 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2392 gimple_stmt_iterator gsi;
2393 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2395 gimple stmt = gsi_stmt (gsi);
2397 if (is_gimple_debug (stmt))
2398 continue;
2400 ipa_analyze_stmt_uses (fbi, stmt);
2401 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2402 visit_ref_for_mod_analysis,
2403 visit_ref_for_mod_analysis,
2404 visit_ref_for_mod_analysis);
2406 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2407 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2408 visit_ref_for_mod_analysis,
2409 visit_ref_for_mod_analysis,
2410 visit_ref_for_mod_analysis);
2413 /* Calculate controlled uses of parameters of NODE. */
2415 static void
2416 ipa_analyze_controlled_uses (struct cgraph_node *node)
2418 struct ipa_node_params *info = IPA_NODE_REF (node);
2420 for (int i = 0; i < ipa_get_param_count (info); i++)
2422 tree parm = ipa_get_param (info, i);
2423 int controlled_uses = 0;
2425 /* For SSA regs see if parameter is used. For non-SSA we compute
2426 the flag during modification analysis. */
2427 if (is_gimple_reg (parm))
2429 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2430 parm);
2431 if (ddef && !has_zero_uses (ddef))
2433 imm_use_iterator imm_iter;
2434 use_operand_p use_p;
2436 ipa_set_param_used (info, i, true);
2437 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2438 if (!is_gimple_call (USE_STMT (use_p)))
2440 if (!is_gimple_debug (USE_STMT (use_p)))
2442 controlled_uses = IPA_UNDESCRIBED_USE;
2443 break;
2446 else
2447 controlled_uses++;
2449 else
2450 controlled_uses = 0;
2452 else
2453 controlled_uses = IPA_UNDESCRIBED_USE;
2454 ipa_set_controlled_uses (info, i, controlled_uses);
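/* A worked sketch (hypothetical code) of what counts as a controlled use:

     extern void g (void (*) (void));

     static void
     f (void (*fn) (void))
     {
       fn ();
       g (fn);
     }

   Both uses of fn's default definition occur in call statements, so the
   loop above computes controlled_uses == 2.  A single non-call, non-debug
   use, for instance storing fn into memory, would instead force
   IPA_UNDESCRIBED_USE.  */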
2458 /* Free stuff in BI. */
2460 static void
2461 free_ipa_bb_info (struct ipa_bb_info *bi)
2463 bi->cg_edges.release ();
2464 bi->param_aa_statuses.release ();
2467 /* Dominator walker driving the analysis. */
2469 class analysis_dom_walker : public dom_walker
2471 public:
2472 analysis_dom_walker (struct func_body_info *fbi)
2473 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2475 virtual void before_dom_children (basic_block);
2477 private:
2478 struct func_body_info *m_fbi;
2481 void
2482 analysis_dom_walker::before_dom_children (basic_block bb)
2484 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2485 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2488 /* Initialize the array describing properties of formal parameters
2489 of NODE, analyze their uses and compute jump functions associated
2490 with actual arguments of calls from within NODE. */
2492 void
2493 ipa_analyze_node (struct cgraph_node *node)
2495 struct func_body_info fbi;
2496 struct ipa_node_params *info;
2498 ipa_check_create_node_params ();
2499 ipa_check_create_edge_args ();
2500 info = IPA_NODE_REF (node);
2502 if (info->analysis_done)
2503 return;
2504 info->analysis_done = 1;
2506 if (ipa_func_spec_opts_forbid_analysis_p (node))
2508 for (int i = 0; i < ipa_get_param_count (info); i++)
2510 ipa_set_param_used (info, i, true);
2511 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2513 return;
2516 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2517 push_cfun (func);
2518 calculate_dominance_info (CDI_DOMINATORS);
2519 ipa_initialize_node_params (node);
2520 ipa_analyze_controlled_uses (node);
2522 fbi.node = node;
2523 fbi.info = IPA_NODE_REF (node);
2524 fbi.bb_infos = vNULL;
2525 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2526 fbi.param_count = ipa_get_param_count (info);
2527 fbi.aa_walked = 0;
2529 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2531 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2532 bi->cg_edges.safe_push (cs);
2535 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2537 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2538 bi->cg_edges.safe_push (cs);
2541 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2543 int i;
2544 struct ipa_bb_info *bi;
2545 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2546 free_ipa_bb_info (bi);
2547 fbi.bb_infos.release ();
2548 free_dominance_info (CDI_DOMINATORS);
2549 pop_cfun ();
2552 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2553 attempt type-based devirtualization. If successful, return the
2554 target function declaration, otherwise return NULL. */
2556 tree
2557 ipa_intraprocedural_devirtualization (gimple call)
2559 tree binfo, token, fndecl;
2560 struct ipa_jump_func jfunc;
2561 tree otr = gimple_call_fn (call);
2563 jfunc.type = IPA_JF_UNKNOWN;
2564 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2565 call, obj_type_ref_class (otr));
2566 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2567 return NULL_TREE;
2568 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2569 if (!binfo)
2570 return NULL_TREE;
2571 token = OBJ_TYPE_REF_TOKEN (otr);
2572 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2573 binfo);
2574 #ifdef ENABLE_CHECKING
2575 if (fndecl)
2576 gcc_assert (possible_polymorphic_call_target_p
2577 (otr, cgraph_node::get (fndecl)));
2578 #endif
2579 return fndecl;
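/* A hedged example (hypothetical C++) of a call this can resolve:

     struct A { virtual int f (); };

     int
     use (void)
     {
       A a;
       A *p = &a;
       return p->f ();
     }

   Assuming earlier passes have propagated &a into the OBJ_TYPE_REF object
   without folding the call itself, the known-type jump function pins the
   dynamic type to A, the BINFO lookup yields A::f, and the call can be
   made direct using purely intraprocedural information.  */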
2582 /* Update the jump function DST when the call graph edge corresponding to SRC
2583 is being inlined, knowing that DST is of type ancestor and SRC of known
2584 type. */
2586 static void
2587 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2588 struct ipa_jump_func *dst)
2590 HOST_WIDE_INT combined_offset;
2591 tree combined_type;
2593 if (!ipa_get_jf_ancestor_type_preserved (dst))
2595 dst->type = IPA_JF_UNKNOWN;
2596 return;
2599 combined_offset = ipa_get_jf_known_type_offset (src)
2600 + ipa_get_jf_ancestor_offset (dst);
2601 combined_type = ipa_get_jf_ancestor_type (dst);
2603 ipa_set_jf_known_type (dst, combined_offset,
2604 ipa_get_jf_known_type_base_type (src),
2605 combined_type);
2608 /* Update the jump functions associated with call graph edge E when the call
2609 graph edge CS is being inlined, assuming that E->caller is already (possibly
2610 indirectly) inlined into CS->callee and that E has not been inlined. */
2612 static void
2613 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2614 struct cgraph_edge *e)
2616 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2617 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2618 int count = ipa_get_cs_argument_count (args);
2619 int i;
2621 for (i = 0; i < count; i++)
2623 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2625 if (dst->type == IPA_JF_ANCESTOR)
2627 struct ipa_jump_func *src;
2628 int dst_fid = dst->value.ancestor.formal_id;
2630 /* Variable number of arguments can cause havoc if we try to access
2631 one that does not exist in the inlined edge. So make sure we
2632 don't. */
2633 if (dst_fid >= ipa_get_cs_argument_count (top))
2635 dst->type = IPA_JF_UNKNOWN;
2636 continue;
2639 src = ipa_get_ith_jump_func (top, dst_fid);
2641 if (src->agg.items
2642 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2644 struct ipa_agg_jf_item *item;
2645 int j;
2647 /* Currently we do not produce clobber aggregate jump functions,
2648 replace with merging when we do. */
2649 gcc_assert (!dst->agg.items);
2651 dst->agg.items = vec_safe_copy (src->agg.items);
2652 dst->agg.by_ref = src->agg.by_ref;
2653 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2654 item->offset -= dst->value.ancestor.offset;
2657 if (src->type == IPA_JF_KNOWN_TYPE)
2658 combine_known_type_and_ancestor_jfs (src, dst);
2659 else if (src->type == IPA_JF_PASS_THROUGH
2660 && src->value.pass_through.operation == NOP_EXPR)
2662 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2663 dst->value.ancestor.agg_preserved &=
2664 src->value.pass_through.agg_preserved;
2665 dst->value.ancestor.type_preserved &=
2666 src->value.pass_through.type_preserved;
2668 else if (src->type == IPA_JF_ANCESTOR)
2670 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2671 dst->value.ancestor.offset += src->value.ancestor.offset;
2672 dst->value.ancestor.agg_preserved &=
2673 src->value.ancestor.agg_preserved;
2674 dst->value.ancestor.type_preserved &=
2675 src->value.ancestor.type_preserved;
2677 else
2678 dst->type = IPA_JF_UNKNOWN;
2680 else if (dst->type == IPA_JF_PASS_THROUGH)
2682 struct ipa_jump_func *src;
2683 /* We must check range due to calls with variable number of arguments
2684 and we cannot combine jump functions with operations. */
2685 if (dst->value.pass_through.operation == NOP_EXPR
2686 && (dst->value.pass_through.formal_id
2687 < ipa_get_cs_argument_count (top)))
2689 int dst_fid = dst->value.pass_through.formal_id;
2690 src = ipa_get_ith_jump_func (top, dst_fid);
2691 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2693 switch (src->type)
2695 case IPA_JF_UNKNOWN:
2696 dst->type = IPA_JF_UNKNOWN;
2697 break;
2698 case IPA_JF_KNOWN_TYPE:
2699 if (ipa_get_jf_pass_through_type_preserved (dst))
2700 ipa_set_jf_known_type (dst,
2701 ipa_get_jf_known_type_offset (src),
2702 ipa_get_jf_known_type_base_type (src),
2703 ipa_get_jf_known_type_component_type (src));
2704 else
2705 dst->type = IPA_JF_UNKNOWN;
2706 break;
2707 case IPA_JF_CONST:
2708 ipa_set_jf_cst_copy (dst, src);
2709 break;
2711 case IPA_JF_PASS_THROUGH:
2713 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2714 enum tree_code operation;
2715 operation = ipa_get_jf_pass_through_operation (src);
2717 if (operation == NOP_EXPR)
2719 bool agg_p, type_p;
2720 agg_p = dst_agg_p
2721 && ipa_get_jf_pass_through_agg_preserved (src);
2722 type_p = ipa_get_jf_pass_through_type_preserved (src)
2723 && ipa_get_jf_pass_through_type_preserved (dst);
2724 ipa_set_jf_simple_pass_through (dst, formal_id,
2725 agg_p, type_p);
2727 else
2729 tree operand = ipa_get_jf_pass_through_operand (src);
2730 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2731 operation);
2733 break;
2735 case IPA_JF_ANCESTOR:
2737 bool agg_p, type_p;
2738 agg_p = dst_agg_p
2739 && ipa_get_jf_ancestor_agg_preserved (src);
2740 type_p = ipa_get_jf_ancestor_type_preserved (src)
2741 && ipa_get_jf_pass_through_type_preserved (dst);
2742 ipa_set_ancestor_jf (dst,
2743 ipa_get_jf_ancestor_offset (src),
2744 ipa_get_jf_ancestor_type (src),
2745 ipa_get_jf_ancestor_formal_id (src),
2746 agg_p, type_p);
2747 break;
2749 default:
2750 gcc_unreachable ();
2753 if (src->agg.items
2754 && (dst_agg_p || !src->agg.by_ref))
2756 /* Currently we do not produce clobber aggregate jump
2757 functions, replace with merging when we do. */
2758 gcc_assert (!dst->agg.items);
2760 dst->agg.by_ref = src->agg.by_ref;
2761 dst->agg.items = vec_safe_copy (src->agg.items);
2764 else
2765 dst->type = IPA_JF_UNKNOWN;
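/* A small worked sketch of the pass-through composition above.  Suppose
   (hypothetically) that A calls B passing its own first parameter, and B
   calls C passing that value on unchanged, so both edges carry simple
   IPA_JF_PASS_THROUGH jump functions with formal_id 0.  When the A->B edge
   CS is inlined, the IPA_JF_PASS_THROUGH case looks up formal_id 0 in TOP,
   takes the formal_id of the jump function found there, and conjoins the
   agg_preserved and type_preserved flags of the two jump functions, so the
   composed summary still says "C's argument 0 is A's parameter 0".  */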
2770 /* If TARGET is an ADDR_EXPR of a function declaration, make it the destination
2771 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2773 struct cgraph_edge *
2774 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2776 struct cgraph_node *callee;
2777 struct inline_edge_summary *es = inline_edge_summary (ie);
2778 bool unreachable = false;
2780 if (TREE_CODE (target) == ADDR_EXPR)
2781 target = TREE_OPERAND (target, 0);
2782 if (TREE_CODE (target) != FUNCTION_DECL)
2784 target = canonicalize_constructor_val (target, NULL);
2785 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2787 if (ie->indirect_info->member_ptr)
2788 /* Member pointer call that goes through a VMT lookup. */
2789 return NULL;
2791 if (dump_enabled_p ())
2793 location_t loc = gimple_location_safe (ie->call_stmt);
2794 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2795 "discovered direct call to non-function in %s/%i, "
2796 "making it __builtin_unreachable\n",
2797 ie->caller->name (), ie->caller->order);
2800 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2801 callee = cgraph_node::get_create (target);
2802 unreachable = true;
2804 else
2805 callee = cgraph_node::get (target);
2807 else
2808 callee = cgraph_node::get (target);
2810 /* Because may-edges are not explicitly represented and the vtable may be
2811 external, we may create the first reference to the object in the unit. */
2812 if (!callee || callee->global.inlined_to)
2815 /* We had better ensure we can refer to it.
2816 In the case of static functions we are out of luck, since we already
2817 removed its body. In the case of public functions we may or may
2818 not introduce the reference. */
2819 if (!canonicalize_constructor_val (target, NULL)
2820 || !TREE_PUBLIC (target))
2822 if (dump_file)
2823 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2824 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2825 xstrdup (ie->caller->name ()),
2826 ie->caller->order,
2827 xstrdup (ie->callee->name ()),
2828 ie->callee->order);
2829 return NULL;
2831 callee = cgraph_node::get_create (target);
2834 if (!dbg_cnt (devirt))
2835 return NULL;
2837 ipa_check_create_node_params ();
2839 /* We cannot make edges to inline clones. It is a bug if someone removed
2840 the cgraph node too early. */
2841 gcc_assert (!callee->global.inlined_to);
2843 if (dump_file && !unreachable)
2845 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2846 "(%s/%i -> %s/%i), for stmt ",
2847 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2848 xstrdup (ie->caller->name ()),
2849 ie->caller->order,
2850 xstrdup (callee->name ()),
2851 callee->order);
2852 if (ie->call_stmt)
2853 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2854 else
2855 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2857 if (dump_enabled_p ())
2859 location_t loc = gimple_location_safe (ie->call_stmt);
2861 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2862 "converting indirect call in %s to direct call to %s\n",
2863 ie->caller->name (), callee->name ());
2865 ie = cgraph_make_edge_direct (ie, callee);
2866 es = inline_edge_summary (ie);
2867 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2868 - eni_size_weights.call_cost);
2869 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2870 - eni_time_weights.call_cost);
2872 return ie;
2875 /* Retrieve a value from the aggregate jump function AGG for the given OFFSET,
2876 or return NULL if there is none. BY_REF specifies whether the value has to
2877 be passed by reference or by value. */
2879 tree
2880 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2881 HOST_WIDE_INT offset, bool by_ref)
2883 struct ipa_agg_jf_item *item;
2884 int i;
2886 if (by_ref != agg->by_ref)
2887 return NULL;
2889 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2890 if (item->offset == offset)
2892 /* Currently we do not have clobber values; return NULL for them once
2893 we do. */
2894 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2895 return item->value;
2897 return NULL;
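/* A hedged usage sketch: a caller that knows an indirect call loads its
   target from bit offset 64 of a by-reference aggregate can ask

     tree t = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);

   and receives the recorded interprocedural invariant, or NULL when no
   item matches the offset or when the by_ref kinds disagree.  */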
2900 /* Remove a reference to SYMBOL from the list of references of a node given by
2901 reference description RDESC. Return true if the reference has been
2902 successfully found and removed. */
2904 static bool
2905 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2907 struct ipa_ref *to_del;
2908 struct cgraph_edge *origin;
2910 origin = rdesc->cs;
2911 if (!origin)
2912 return false;
2913 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2914 origin->lto_stmt_uid);
2915 if (!to_del)
2916 return false;
2918 to_del->remove_reference ();
2919 if (dump_file)
2920 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2921 xstrdup (origin->caller->name ()),
2922 origin->caller->order, xstrdup (symbol->name ()));
2923 return true;
2926 /* If JFUNC has a reference description with refcount different from
2927 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2928 NULL. JFUNC must be a constant jump function. */
2930 static struct ipa_cst_ref_desc *
2931 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2933 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2934 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2935 return rdesc;
2936 else
2937 return NULL;
2940 /* If the value of constant jump function JFUNC is an address of a function
2941 declaration, return the associated call graph node. Otherwise return
2942 NULL. */
2944 static cgraph_node *
2945 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2947 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2948 tree cst = ipa_get_jf_constant (jfunc);
2949 if (TREE_CODE (cst) != ADDR_EXPR
2950 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2951 return NULL;
2953 return cgraph_node::get (TREE_OPERAND (cst, 0));
2957 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2958 refcount and, if it hits zero, remove the reference to the associated symbol
2959 from the caller of the edge specified in the rdesc. Return false if either
2960 the symbol or the reference could not be found, otherwise return true. */
2962 static bool
2963 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2965 struct ipa_cst_ref_desc *rdesc;
2966 if (jfunc->type == IPA_JF_CONST
2967 && (rdesc = jfunc_rdesc_usable (jfunc))
2968 && --rdesc->refcount == 0)
2970 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2971 if (!symbol)
2972 return false;
2974 return remove_described_reference (symbol, rdesc);
2976 return true;
2979 /* Try to find a destination for indirect edge IE that corresponds to a simple
2980 call or a call of a member function pointer and where the destination is a
2981 pointer formal parameter described by jump function JFUNC. If it can be
2982 determined, return the newly direct edge, otherwise return NULL.
2983 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2985 static struct cgraph_edge *
2986 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2987 struct ipa_jump_func *jfunc,
2988 struct ipa_node_params *new_root_info)
2990 struct cgraph_edge *cs;
2991 tree target;
2992 bool agg_contents = ie->indirect_info->agg_contents;
2994 if (ie->indirect_info->agg_contents)
2995 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2996 ie->indirect_info->offset,
2997 ie->indirect_info->by_ref);
2998 else
2999 target = ipa_value_from_jfunc (new_root_info, jfunc);
3000 if (!target)
3001 return NULL;
3002 cs = ipa_make_edge_direct_to_target (ie, target);
3004 if (cs && !agg_contents)
3006 bool ok;
3007 gcc_checking_assert (cs->callee
3008 && (cs != ie
3009 || jfunc->type != IPA_JF_CONST
3010 || !cgraph_node_for_jfunc (jfunc)
3011 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3012 ok = try_decrement_rdesc_refcount (jfunc);
3013 gcc_checking_assert (ok);
3016 return cs;
3019 /* Return the target to be used in cases of impossible devirtualization. IE
3020 and target (the latter can be NULL) are dumped when dumping is enabled. */
3022 tree
3023 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3025 if (dump_file)
3027 if (target)
3028 fprintf (dump_file,
3029 "Type inconsistent devirtualization: %s/%i->%s\n",
3030 ie->caller->name (), ie->caller->order,
3031 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3032 else
3033 fprintf (dump_file,
3034 "No devirtualization target in %s/%i\n",
3035 ie->caller->name (), ie->caller->order);
3037 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3038 cgraph_node::get_create (new_target);
3039 return new_target;
3042 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3043 call based on a formal parameter which is described by jump function JFUNC
3044 and if it can be determined, make it direct and return the direct edge.
3045 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3046 are relative to. */
3048 static struct cgraph_edge *
3049 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3050 struct ipa_jump_func *jfunc,
3051 struct ipa_node_params *new_root_info)
3053 tree binfo, target;
3055 if (!flag_devirtualize)
3056 return NULL;
3058 /* First try to do lookup via known virtual table pointer value. */
3059 if (!ie->indirect_info->by_ref)
3061 tree vtable;
3062 unsigned HOST_WIDE_INT offset;
3063 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3064 ie->indirect_info->offset,
3065 true);
3066 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3068 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3069 vtable, offset);
3070 if (target)
3072 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3073 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3074 || !possible_polymorphic_call_target_p
3075 (ie, cgraph_node::get (target)))
3076 target = ipa_impossible_devirt_target (ie, target);
3077 return ipa_make_edge_direct_to_target (ie, target);
3082 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3084 if (!binfo)
3085 return NULL;
3087 if (TREE_CODE (binfo) != TREE_BINFO)
3089 ipa_polymorphic_call_context context;
3090 vec <cgraph_node *>targets;
3091 bool final;
3093 if (!get_polymorphic_call_info_from_invariant
3094 (&context, binfo, ie->indirect_info->otr_type,
3095 ie->indirect_info->offset))
3096 return NULL;
3097 targets = possible_polymorphic_call_targets
3098 (ie->indirect_info->otr_type,
3099 ie->indirect_info->otr_token,
3100 context, &final);
3101 if (!final || targets.length () > 1)
3102 return NULL;
3103 if (targets.length () == 1)
3104 target = targets[0]->decl;
3105 else
3106 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3108 else
3110 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3111 ie->indirect_info->otr_type);
3112 if (binfo)
3113 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3114 binfo);
3115 else
3116 return NULL;
3119 if (target)
3121 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
3122 target = ipa_impossible_devirt_target (ie, target);
3123 return ipa_make_edge_direct_to_target (ie, target);
3125 else
3126 return NULL;
3129 /* Update the param called notes associated with NODE when CS is being inlined,
3130 assuming NODE is (potentially indirectly) inlined into CS->callee.
3131 Moreover, if the callee is discovered to be constant, create a new cgraph
3132 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3133 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3135 static bool
3136 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3137 struct cgraph_node *node,
3138 vec<cgraph_edge *> *new_edges)
3140 struct ipa_edge_args *top;
3141 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3142 struct ipa_node_params *new_root_info;
3143 bool res = false;
3145 ipa_check_create_edge_args ();
3146 top = IPA_EDGE_REF (cs);
3147 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3148 ? cs->caller->global.inlined_to
3149 : cs->caller);
3151 for (ie = node->indirect_calls; ie; ie = next_ie)
3153 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3154 struct ipa_jump_func *jfunc;
3155 int param_index;
3157 next_ie = ie->next_callee;
3159 if (ici->param_index == -1)
3160 continue;
3162 /* We must check range due to calls with variable number of arguments: */
3163 if (ici->param_index >= ipa_get_cs_argument_count (top))
3165 ici->param_index = -1;
3166 continue;
3169 param_index = ici->param_index;
3170 jfunc = ipa_get_ith_jump_func (top, param_index);
3172 if (!flag_indirect_inlining)
3173 new_direct_edge = NULL;
3174 else if (ici->polymorphic)
3175 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3176 new_root_info);
3177 else
3178 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3179 new_root_info);
3180 /* If speculation was removed, then we need to do nothing. */
3181 if (new_direct_edge && new_direct_edge != ie)
3183 new_direct_edge->indirect_inlining_edge = 1;
3184 top = IPA_EDGE_REF (cs);
3185 res = true;
3187 else if (new_direct_edge)
3189 new_direct_edge->indirect_inlining_edge = 1;
3190 if (new_direct_edge->call_stmt)
3191 new_direct_edge->call_stmt_cannot_inline_p
3192 = !gimple_check_call_matching_types (
3193 new_direct_edge->call_stmt,
3194 new_direct_edge->callee->decl, false);
3195 if (new_edges)
3197 new_edges->safe_push (new_direct_edge);
3198 res = true;
3200 top = IPA_EDGE_REF (cs);
3202 else if (jfunc->type == IPA_JF_PASS_THROUGH
3203 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3205 if ((ici->agg_contents
3206 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3207 || (ici->polymorphic
3208 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3209 ici->param_index = -1;
3210 else
3211 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3213 else if (jfunc->type == IPA_JF_ANCESTOR)
3215 if ((ici->agg_contents
3216 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3217 || (ici->polymorphic
3218 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3219 ici->param_index = -1;
3220 else
3222 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3223 if (ipa_get_jf_ancestor_offset (jfunc))
3224 ici->outer_type = NULL;
3225 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3228 else
3229 /* Either we can find a destination for this edge now or never. */
3230 ici->param_index = -1;
3233 return res;
3236 /* Recursively traverse subtree of NODE (including node) made of inlined
3237 cgraph_edges when CS has been inlined and invoke
3238 update_indirect_edges_after_inlining on all nodes and
3239 update_jump_functions_after_inlining on all non-inlined edges that lead out
3240 of this subtree. Newly discovered indirect edges will be added to
3241 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3242 created. */
3244 static bool
3245 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3246 struct cgraph_node *node,
3247 vec<cgraph_edge *> *new_edges)
3249 struct cgraph_edge *e;
3250 bool res;
3252 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3254 for (e = node->callees; e; e = e->next_callee)
3255 if (!e->inline_failed)
3256 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3257 else
3258 update_jump_functions_after_inlining (cs, e);
3259 for (e = node->indirect_calls; e; e = e->next_callee)
3260 update_jump_functions_after_inlining (cs, e);
3262 return res;
3265 /* Combine two controlled uses counts as done during inlining. */
3267 static int
3268 combine_controlled_uses_counters (int c, int d)
3270 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3271 return IPA_UNDESCRIBED_USE;
3272 else
3273 return c + d - 1;
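/* Informally, why c + d - 1: the actual argument of the call being inlined
   accounted for one of the caller's c controlled uses, and that particular
   use disappears with the call, while the callee's d uses of its formal
   parameter become uses of the caller's value.  For example, c = 3 and
   d = 2 combine to 3 + 2 - 1 = 4 controlled uses.  */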
3276 /* Propagate the number of controlled users from CS->callee to the new root of the
3277 tree of inlined nodes. */
3279 static void
3280 propagate_controlled_uses (struct cgraph_edge *cs)
3282 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3283 struct cgraph_node *new_root = cs->caller->global.inlined_to
3284 ? cs->caller->global.inlined_to : cs->caller;
3285 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3286 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3287 int count, i;
3289 count = MIN (ipa_get_cs_argument_count (args),
3290 ipa_get_param_count (old_root_info));
3291 for (i = 0; i < count; i++)
3293 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3294 struct ipa_cst_ref_desc *rdesc;
3296 if (jf->type == IPA_JF_PASS_THROUGH)
3298 int src_idx, c, d;
3299 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3300 c = ipa_get_controlled_uses (new_root_info, src_idx);
3301 d = ipa_get_controlled_uses (old_root_info, i);
3303 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3304 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3305 c = combine_controlled_uses_counters (c, d);
3306 ipa_set_controlled_uses (new_root_info, src_idx, c);
3307 if (c == 0 && new_root_info->ipcp_orig_node)
3309 struct cgraph_node *n;
3310 struct ipa_ref *ref;
3311 tree t = new_root_info->known_vals[src_idx];
3313 if (t && TREE_CODE (t) == ADDR_EXPR
3314 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3315 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3316 && (ref = new_root->find_reference (n, NULL, 0)))
3318 if (dump_file)
3319 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3320 "reference from %s/%i to %s/%i.\n",
3321 xstrdup (new_root->name ()),
3322 new_root->order,
3323 xstrdup (n->name ()), n->order);
3324 ref->remove_reference ();
3328 else if (jf->type == IPA_JF_CONST
3329 && (rdesc = jfunc_rdesc_usable (jf)))
3331 int d = ipa_get_controlled_uses (old_root_info, i);
3332 int c = rdesc->refcount;
3333 rdesc->refcount = combine_controlled_uses_counters (c, d);
3334 if (rdesc->refcount == 0)
3336 tree cst = ipa_get_jf_constant (jf);
3337 struct cgraph_node *n;
3338 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3339 && TREE_CODE (TREE_OPERAND (cst, 0))
3340 == FUNCTION_DECL);
3341 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3342 if (n)
3344 struct cgraph_node *clone;
3345 bool ok;
3346 ok = remove_described_reference (n, rdesc);
3347 gcc_checking_assert (ok);
3349 clone = cs->caller;
3350 while (clone->global.inlined_to
3351 && clone != rdesc->cs->caller
3352 && IPA_NODE_REF (clone)->ipcp_orig_node)
3354 struct ipa_ref *ref;
3355 ref = clone->find_reference (n, NULL, 0);
3356 if (ref)
3358 if (dump_file)
3359 fprintf (dump_file, "ipa-prop: Removing "
3360 "cloning-created reference "
3361 "from %s/%i to %s/%i.\n",
3362 xstrdup (clone->name ()),
3363 clone->order,
3364 xstrdup (n->name ()),
3365 n->order);
3366 ref->remove_reference ();
3368 clone = clone->callers->caller;
3375 for (i = ipa_get_param_count (old_root_info);
3376 i < ipa_get_cs_argument_count (args);
3377 i++)
3379 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3381 if (jf->type == IPA_JF_CONST)
3383 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3384 if (rdesc)
3385 rdesc->refcount = IPA_UNDESCRIBED_USE;
3387 else if (jf->type == IPA_JF_PASS_THROUGH)
3388 ipa_set_controlled_uses (new_root_info,
3389 jf->value.pass_through.formal_id,
3390 IPA_UNDESCRIBED_USE);
3394 /* Update jump functions and call notes when the call site CS is inlined.
3395 CS is expected to lead to a node already cloned by
3396 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3397 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3398 created. */
3400 bool
3401 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3402 vec<cgraph_edge *> *new_edges)
3404 bool changed;
3405 /* Do nothing if the preparation phase has not been carried out yet
3406 (i.e. during early inlining). */
3407 if (!ipa_node_params_vector.exists ())
3408 return false;
3409 gcc_assert (ipa_edge_args_vector);
3411 propagate_controlled_uses (cs);
3412 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3414 return changed;
3417 /* Frees all dynamically allocated structures that the argument info points
3418 to. */
3420 void
3421 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3423 vec_free (args->jump_functions);
3424 memset (args, 0, sizeof (*args));
3427 /* Free all ipa_edge_args structures. */
3429 void
3430 ipa_free_all_edge_args (void)
3432 int i;
3433 struct ipa_edge_args *args;
3435 if (!ipa_edge_args_vector)
3436 return;
3438 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3439 ipa_free_edge_args_substructures (args);
3441 vec_free (ipa_edge_args_vector);
3444 /* Frees all dynamically allocated structures that the param info points
3445 to. */
3447 void
3448 ipa_free_node_params_substructures (struct ipa_node_params *info)
3450 info->descriptors.release ();
3451 free (info->lattices);
3452 /* Lattice values and their sources are deallocated with their allocation
3453 pool. */
3454 info->known_vals.release ();
3455 memset (info, 0, sizeof (*info));
3458 /* Free all ipa_node_params structures. */
3460 void
3461 ipa_free_all_node_params (void)
3463 int i;
3464 struct ipa_node_params *info;
3466 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3467 ipa_free_node_params_substructures (info);
3469 ipa_node_params_vector.release ();
3472 /* Set the aggregate replacements of NODE to be AGGVALS. */
3474 void
3475 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3476 struct ipa_agg_replacement_value *aggvals)
3478 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3479 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3481 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3484 /* Hook that is called by cgraph.c when an edge is removed. */
3486 static void
3487 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3489 struct ipa_edge_args *args;
3491 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3492 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3493 return;
3495 args = IPA_EDGE_REF (cs);
3496 if (args->jump_functions)
3498 struct ipa_jump_func *jf;
3499 int i;
3500 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3502 struct ipa_cst_ref_desc *rdesc;
3503 try_decrement_rdesc_refcount (jf);
3504 if (jf->type == IPA_JF_CONST
3505 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3506 && rdesc->cs == cs)
3507 rdesc->cs = NULL;
3511 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3514 /* Hook that is called by cgraph.c when a node is removed. */
3516 static void
3517 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3519 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3520 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3521 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3522 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3523 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3526 /* Hook that is called by cgraph.c when an edge is duplicated. */
3528 static void
3529 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3530 __attribute__((unused)) void *data)
3532 struct ipa_edge_args *old_args, *new_args;
3533 unsigned int i;
3535 ipa_check_create_edge_args ();
3537 old_args = IPA_EDGE_REF (src);
3538 new_args = IPA_EDGE_REF (dst);
3540 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3542 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3544 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3545 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3547 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3549 if (src_jf->type == IPA_JF_CONST)
3551 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3553 if (!src_rdesc)
3554 dst_jf->value.constant.rdesc = NULL;
3555 else if (src->caller == dst->caller)
3557 struct ipa_ref *ref;
3558 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3559 gcc_checking_assert (n);
3560 ref = src->caller->find_reference (n, src->call_stmt,
3561 src->lto_stmt_uid);
3562 gcc_checking_assert (ref);
3563 dst->caller->clone_reference (ref, ref->stmt);
3565 gcc_checking_assert (ipa_refdesc_pool);
3566 struct ipa_cst_ref_desc *dst_rdesc
3567 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3568 dst_rdesc->cs = dst;
3569 dst_rdesc->refcount = src_rdesc->refcount;
3570 dst_rdesc->next_duplicate = NULL;
3571 dst_jf->value.constant.rdesc = dst_rdesc;
3573 else if (src_rdesc->cs == src)
3575 struct ipa_cst_ref_desc *dst_rdesc;
3576 gcc_checking_assert (ipa_refdesc_pool);
3577 dst_rdesc
3578 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3579 dst_rdesc->cs = dst;
3580 dst_rdesc->refcount = src_rdesc->refcount;
3581 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3582 src_rdesc->next_duplicate = dst_rdesc;
3583 dst_jf->value.constant.rdesc = dst_rdesc;
3585 else
3587 struct ipa_cst_ref_desc *dst_rdesc;
3588 /* This can happen during inlining, when a JFUNC can refer to a
3589 reference taken in a function up in the tree of inline clones.
3590 We need to find the duplicate that refers to our tree of
3591 inline clones. */
3593 gcc_assert (dst->caller->global.inlined_to);
3594 for (dst_rdesc = src_rdesc->next_duplicate;
3595 dst_rdesc;
3596 dst_rdesc = dst_rdesc->next_duplicate)
3598 struct cgraph_node *top;
3599 top = dst_rdesc->cs->caller->global.inlined_to
3600 ? dst_rdesc->cs->caller->global.inlined_to
3601 : dst_rdesc->cs->caller;
3602 if (dst->caller->global.inlined_to == top)
3603 break;
3605 gcc_assert (dst_rdesc);
3606 dst_jf->value.constant.rdesc = dst_rdesc;
3612 /* Hook that is called by cgraph.c when a node is duplicated. */
3614 static void
3615 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3616 ATTRIBUTE_UNUSED void *data)
3618 struct ipa_node_params *old_info, *new_info;
3619 struct ipa_agg_replacement_value *old_av, *new_av;
3621 ipa_check_create_node_params ();
3622 old_info = IPA_NODE_REF (src);
3623 new_info = IPA_NODE_REF (dst);
3625 new_info->descriptors = old_info->descriptors.copy ();
3626 new_info->lattices = NULL;
3627 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3629 new_info->analysis_done = old_info->analysis_done;
3630 new_info->node_enqueued = old_info->node_enqueued;
3632 old_av = ipa_get_agg_replacements_for_node (src);
3633 if (!old_av)
3634 return;
3636 new_av = NULL;
3637 while (old_av)
3639 struct ipa_agg_replacement_value *v;
3641 v = ggc_alloc<ipa_agg_replacement_value> ();
3642 memcpy (v, old_av, sizeof (*v));
3643 v->next = new_av;
3644 new_av = v;
3645 old_av = old_av->next;
3647 ipa_set_node_agg_value_chain (dst, new_av);
3651 /* Analyze newly added function into callgraph. */
3653 static void
3654 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3656 if (node->has_gimple_body_p ())
3657 ipa_analyze_node (node);
3660 /* Register our cgraph hooks if they are not already there. */
3662 void
3663 ipa_register_cgraph_hooks (void)
3665 if (!edge_removal_hook_holder)
3666 edge_removal_hook_holder =
3667 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3668 if (!node_removal_hook_holder)
3669 node_removal_hook_holder =
3670 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3671 if (!edge_duplication_hook_holder)
3672 edge_duplication_hook_holder =
3673 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3674 if (!node_duplication_hook_holder)
3675 node_duplication_hook_holder =
3676 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3677 function_insertion_hook_holder =
3678 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3681 /* Unregister our cgraph hooks. */
3683 static void
3684 ipa_unregister_cgraph_hooks (void)
3686 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3687 edge_removal_hook_holder = NULL;
3688 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3689 node_removal_hook_holder = NULL;
3690 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3691 edge_duplication_hook_holder = NULL;
3692 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3693 node_duplication_hook_holder = NULL;
3694 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3695 function_insertion_hook_holder = NULL;
3698 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3699 longer needed after ipa-cp. */
3701 void
3702 ipa_free_all_structures_after_ipa_cp (void)
3704 if (!optimize)
3706 ipa_free_all_edge_args ();
3707 ipa_free_all_node_params ();
3708 free_alloc_pool (ipcp_sources_pool);
3709 free_alloc_pool (ipcp_values_pool);
3710 free_alloc_pool (ipcp_agg_lattice_pool);
3711 ipa_unregister_cgraph_hooks ();
3712 if (ipa_refdesc_pool)
3713 free_alloc_pool (ipa_refdesc_pool);
3717 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3718 longer needed after indirect inlining. */
3720 void
3721 ipa_free_all_structures_after_iinln (void)
3723 ipa_free_all_edge_args ();
3724 ipa_free_all_node_params ();
3725 ipa_unregister_cgraph_hooks ();
3726 if (ipcp_sources_pool)
3727 free_alloc_pool (ipcp_sources_pool);
3728 if (ipcp_values_pool)
3729 free_alloc_pool (ipcp_values_pool);
3730 if (ipcp_agg_lattice_pool)
3731 free_alloc_pool (ipcp_agg_lattice_pool);
3732 if (ipa_refdesc_pool)
3733 free_alloc_pool (ipa_refdesc_pool);
3736 /* Print the parameter descriptors recorded in the ipa_node_params of
3737 NODE to F. */
3739 void
3740 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3742 int i, count;
3743 struct ipa_node_params *info;
3745 if (!node->definition)
3746 return;
3747 info = IPA_NODE_REF (node);
3748 fprintf (f, " function %s/%i parameter descriptors:\n",
3749 node->name (), node->order);
3750 count = ipa_get_param_count (info);
3751 for (i = 0; i < count; i++)
3753 int c;
3755 fprintf (f, " ");
3756 ipa_dump_param (f, info, i);
3757 if (ipa_is_param_used (info, i))
3758 fprintf (f, " used");
3759 c = ipa_get_controlled_uses (info, i);
3760 if (c == IPA_UNDESCRIBED_USE)
3761 fprintf (f, " undescribed_use");
3762 else
3763 fprintf (f, " controlled_uses=%i", c);
3764 fprintf (f, "\n");
3768 /* Print the parameter descriptors of all functions in the
3769 callgraph to F. */
3771 void
3772 ipa_print_all_params (FILE * f)
3774 struct cgraph_node *node;
3776 fprintf (f, "\nFunction parameters:\n");
3777 FOR_EACH_FUNCTION (node)
3778 ipa_print_node_params (f, node);
3781 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3783 vec<tree>
3784 ipa_get_vector_of_formal_parms (tree fndecl)
3786 vec<tree> args;
3787 int count;
3788 tree parm;
3790 gcc_assert (!flag_wpa);
3791 count = count_formal_params (fndecl);
3792 args.create (count);
3793 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3794 args.quick_push (parm);
3796 return args;
3799 /* Return a heap allocated vector containing types of formal parameters of
3800 function type FNTYPE. */
3802 vec<tree>
3803 ipa_get_vector_of_formal_parm_types (tree fntype)
3805 vec<tree> types;
3806 int count = 0;
3807 tree t;
3809 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3810 count++;
3812 types.create (count);
3813 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3814 types.quick_push (TREE_VALUE (t));
3816 return types;
3819 /* Modify the function declaration FNDECL and its type according to the plan
3820 in ADJUSTMENTS. It also sets the base fields of individual adjustment
3821 structures to point to the actual parameters being modified, as
3822 determined by their base_index fields. */
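/* As an illustration (a hypothetical sketch, not taken from a real dump):
   given

     void foo (struct S *p, int unused);

   a vector with one entry describing a reduced piece of parameter 0
   (base_index 0, offset 0, a scalar type, neither a copy nor a removal)
   and one IPA_PARM_OP_REMOVE entry for parameter 1 would turn the
   declaration into something like

     void foo (int SYNTH.0);

   where SYNTH.0 is a synthesized PARM_DECL built by the loop below (the
   name comes from the default "SYNTH" prefix).  */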
3824 void
3825 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3827 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3828 tree orig_type = TREE_TYPE (fndecl);
3829 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3831 /* The following test is an ugly hack; some functions simply don't have any
3832 arguments in their type. This is probably a bug but well... */
3833 bool care_for_types = (old_arg_types != NULL_TREE);
3834 bool last_parm_void;
3835 vec<tree> otypes;
3836 if (care_for_types)
3838 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3839 == void_type_node);
3840 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3841 if (last_parm_void)
3842 gcc_assert (oparms.length () + 1 == otypes.length ());
3843 else
3844 gcc_assert (oparms.length () == otypes.length ());
3846 else
3848 last_parm_void = false;
3849 otypes.create (0);
3852 int len = adjustments.length ();
3853 tree *link = &DECL_ARGUMENTS (fndecl);
3854 tree new_arg_types = NULL;
3855 for (int i = 0; i < len; i++)
3857 struct ipa_parm_adjustment *adj;
3858 gcc_assert (link);
3860 adj = &adjustments[i];
3861 tree parm;
3862 if (adj->op == IPA_PARM_OP_NEW)
3863 parm = NULL;
3864 else
3865 parm = oparms[adj->base_index];
3866 adj->base = parm;
3868 if (adj->op == IPA_PARM_OP_COPY)
3870 if (care_for_types)
3871 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3872 new_arg_types);
3873 *link = parm;
3874 link = &DECL_CHAIN (parm);
3876 else if (adj->op != IPA_PARM_OP_REMOVE)
3878 tree new_parm;
3879 tree ptype;
3881 if (adj->by_ref)
3882 ptype = build_pointer_type (adj->type);
3883 else
3885 ptype = adj->type;
3886 if (is_gimple_reg_type (ptype))
3888 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3889 if (TYPE_ALIGN (ptype) < malign)
3890 ptype = build_aligned_type (ptype, malign);
3894 if (care_for_types)
3895 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3897 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3898 ptype);
3899 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3900 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3901 DECL_ARTIFICIAL (new_parm) = 1;
3902 DECL_ARG_TYPE (new_parm) = ptype;
3903 DECL_CONTEXT (new_parm) = fndecl;
3904 TREE_USED (new_parm) = 1;
3905 DECL_IGNORED_P (new_parm) = 1;
3906 layout_decl (new_parm, 0);
3908 if (adj->op == IPA_PARM_OP_NEW)
3909 adj->base = NULL;
3910 else
3911 adj->base = parm;
3912 adj->new_decl = new_parm;
3914 *link = new_parm;
3915 link = &DECL_CHAIN (new_parm);
3919 *link = NULL_TREE;
3921 tree new_reversed = NULL;
3922 if (care_for_types)
3924 new_reversed = nreverse (new_arg_types);
3925 if (last_parm_void)
3927 if (new_reversed)
3928 TREE_CHAIN (new_arg_types) = void_list_node;
3929 else
3930 new_reversed = void_list_node;
3934 /* Use copy_node to preserve as much as possible from the original type
3935 (debug info, attribute lists etc.).
3936 The exception is METHOD_TYPEs, which must have a THIS argument.
3937 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3938 instead. */
3939 tree new_type = NULL;
3940 if (TREE_CODE (orig_type) != METHOD_TYPE
3941 || (adjustments[0].op == IPA_PARM_OP_COPY
3942 && adjustments[0].base_index == 0))
3944 new_type = build_distinct_type_copy (orig_type);
3945 TYPE_ARG_TYPES (new_type) = new_reversed;
3947 else
3949 new_type
3950 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3951 new_reversed));
3952 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3953 DECL_VINDEX (fndecl) = NULL_TREE;
3956 /* When the signature changes, we need to clear the builtin info. */
3957 if (DECL_BUILT_IN (fndecl))
3959 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3960 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3963 /* This is a new type, not a copy of an old type. Need to reassociate
3964 variants. We can handle everything except the main variant lazily. */
3965 tree t = TYPE_MAIN_VARIANT (orig_type);
3966 if (orig_type != t)
3968 TYPE_MAIN_VARIANT (new_type) = t;
3969 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3970 TYPE_NEXT_VARIANT (t) = new_type;
3972 else
3974 TYPE_MAIN_VARIANT (new_type) = new_type;
3975 TYPE_NEXT_VARIANT (new_type) = NULL;
3978 TREE_TYPE (fndecl) = new_type;
3979 DECL_VIRTUAL_P (fndecl) = 0;
3980 DECL_LANG_SPECIFIC (fndecl) = NULL;
3981 otypes.release ();
3982 oparms.release ();
3985 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3986 If this is a directly recursive call, CS must be NULL. Otherwise it must
3987 contain the corresponding call graph edge. */
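/* Continuing the hypothetical sketch from ipa_modify_formal_parameters
   above: a call

     foo (&s, 3);

   would be rewritten into roughly

     tmp_1 = MEM[(int *)&s];
     foo (tmp_1);

   i.e. the reduced piece is loaded into a temporary and the removed
   argument is dropped.  */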
3989 void
3990 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3991 ipa_parm_adjustment_vec adjustments)
3993 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3994 vec<tree> vargs;
3995 vec<tree, va_gc> **debug_args = NULL;
3996 gimple new_stmt;
3997 gimple_stmt_iterator gsi, prev_gsi;
3998 tree callee_decl;
3999 int i, len;
4001 len = adjustments.length ();
4002 vargs.create (len);
4003 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4004 current_node->remove_stmt_references (stmt);
4006 gsi = gsi_for_stmt (stmt);
4007 prev_gsi = gsi;
4008 gsi_prev (&prev_gsi);
4009 for (i = 0; i < len; i++)
4011 struct ipa_parm_adjustment *adj;
4013 adj = &adjustments[i];
4015 if (adj->op == IPA_PARM_OP_COPY)
4017 tree arg = gimple_call_arg (stmt, adj->base_index);
4019 vargs.quick_push (arg);
4021 else if (adj->op != IPA_PARM_OP_REMOVE)
4023 tree expr, base, off;
4024 location_t loc;
4025 unsigned int deref_align = 0;
4026 bool deref_base = false;
4028 /* When we create a new parameter out of the value of the old one, we can
4029 do the following kinds of transformations:
4031 - A scalar passed by reference is converted to a scalar passed by
4032 value. (adj->by_ref is false and the type of the original
4033 actual argument is a pointer to a scalar).
4035 - A part of an aggregate is passed instead of the whole aggregate.
4036 The part can be passed either by value or by reference, this is
4037 determined by value of adj->by_ref. Moreover, the code below
4038 handles both situations when the original aggregate is passed by
4039 value (its type is not a pointer) and when it is passed by
4040 reference (it is a pointer to an aggregate).
4042 When the new argument is passed by reference (adj->by_ref is true)
4043 it must be a part of an aggregate and therefore we form it by
4044 simply taking the address of a reference inside the original
4045 aggregate. */
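/* For instance (a sketch): if the original argument is &s and the
   reduced piece lives at offset 0, the new argument becomes a load
   MEM[(int *)&s] of the piece when adj->by_ref is false, or the
   address of that piece when it is true.  */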
4047 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4048 base = gimple_call_arg (stmt, adj->base_index);
4049 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4050 : EXPR_LOCATION (base);
4052 if (TREE_CODE (base) != ADDR_EXPR
4053 && POINTER_TYPE_P (TREE_TYPE (base)))
4054 off = build_int_cst (adj->alias_ptr_type,
4055 adj->offset / BITS_PER_UNIT);
4056 else
4058 HOST_WIDE_INT base_offset;
4059 tree prev_base;
4060 bool addrof;
4062 if (TREE_CODE (base) == ADDR_EXPR)
4064 base = TREE_OPERAND (base, 0);
4065 addrof = true;
4067 else
4068 addrof = false;
4069 prev_base = base;
4070 base = get_addr_base_and_unit_offset (base, &base_offset);
4071 /* Aggregate arguments can have non-invariant addresses. */
4072 if (!base)
4074 base = build_fold_addr_expr (prev_base);
4075 off = build_int_cst (adj->alias_ptr_type,
4076 adj->offset / BITS_PER_UNIT);
4078 else if (TREE_CODE (base) == MEM_REF)
4080 if (!addrof)
4082 deref_base = true;
4083 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4085 off = build_int_cst (adj->alias_ptr_type,
4086 base_offset
4087 + adj->offset / BITS_PER_UNIT);
4088 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4089 off);
4090 base = TREE_OPERAND (base, 0);
4092 else
4094 off = build_int_cst (adj->alias_ptr_type,
4095 base_offset
4096 + adj->offset / BITS_PER_UNIT);
4097 base = build_fold_addr_expr (base);
4101 if (!adj->by_ref)
4103 tree type = adj->type;
4104 unsigned int align;
4105 unsigned HOST_WIDE_INT misalign;
4107 if (deref_base)
4109 align = deref_align;
4110 misalign = 0;
4112 else
4114 get_pointer_alignment_1 (base, &align, &misalign);
4115 if (TYPE_ALIGN (type) > align)
4116 align = TYPE_ALIGN (type);
4118 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4119 * BITS_PER_UNIT);
4120 misalign = misalign & (align - 1);
4121 if (misalign != 0)
4122 align = (misalign & -misalign);
4123 if (align < TYPE_ALIGN (type))
4124 type = build_aligned_type (type, align);
4125 base = force_gimple_operand_gsi (&gsi, base,
4126 true, NULL, true, GSI_SAME_STMT);
4127 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4128 /* If expr is not a valid gimple call argument, emit
4129 a load into a temporary. */
4130 if (is_gimple_reg_type (TREE_TYPE (expr)))
4132 gimple tem = gimple_build_assign (NULL_TREE, expr);
4133 if (gimple_in_ssa_p (cfun))
4135 gimple_set_vuse (tem, gimple_vuse (stmt));
4136 expr = make_ssa_name (TREE_TYPE (expr), tem);
4138 else
4139 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4140 gimple_assign_set_lhs (tem, expr);
4141 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4144 else
4146 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4147 expr = build_fold_addr_expr (expr);
4148 expr = force_gimple_operand_gsi (&gsi, expr,
4149 true, NULL, true, GSI_SAME_STMT);
4151 vargs.quick_push (expr);
4153 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4155 unsigned int ix;
4156 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4157 gimple def_temp;
4159 arg = gimple_call_arg (stmt, adj->base_index);
4160 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4162 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4163 continue;
4164 arg = fold_convert_loc (gimple_location (stmt),
4165 TREE_TYPE (origin), arg);
4167 if (debug_args == NULL)
4168 debug_args = decl_debug_args_insert (callee_decl);
4169 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4170 if (ddecl == origin)
4172 ddecl = (**debug_args)[ix + 1];
4173 break;
4175 if (ddecl == NULL)
4177 ddecl = make_node (DEBUG_EXPR_DECL);
4178 DECL_ARTIFICIAL (ddecl) = 1;
4179 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4180 DECL_MODE (ddecl) = DECL_MODE (origin);
4182 vec_safe_push (*debug_args, origin);
4183 vec_safe_push (*debug_args, ddecl);
4185 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4186 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4190 if (dump_file && (dump_flags & TDF_DETAILS))
4192 fprintf (dump_file, "replacing stmt:");
4193 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4196 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4197 vargs.release ();
4198 if (gimple_call_lhs (stmt))
4199 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4201 gimple_set_block (new_stmt, gimple_block (stmt));
4202 if (gimple_has_location (stmt))
4203 gimple_set_location (new_stmt, gimple_location (stmt));
4204 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4205 gimple_call_copy_flags (new_stmt, stmt);
4206 if (gimple_in_ssa_p (cfun))
4208 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4209 if (gimple_vdef (stmt))
4211 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4212 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4216 if (dump_file && (dump_flags & TDF_DETAILS))
4218 fprintf (dump_file, "with stmt:");
4219 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4220 fprintf (dump_file, "\n");
4222 gsi_replace (&gsi, new_stmt, true);
4223 if (cs)
4224 cgraph_set_call_stmt (cs, new_stmt);
4227 current_node->record_stmt_references (gsi_stmt (gsi));
4228 gsi_prev (&gsi);
4230 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4233 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4234 so. ADJUSTMENTS is a vector of adjustments. CONVERT specifies whether
4235 the function should care about type incompatibility between the current
4236 and new expressions. If it is false, the function will leave
4237 incompatibility issues to the caller. Return true iff the expression
4238 was modified. */
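/* E.g. (continuing the hypothetical sketch): in the rewritten body of foo,
   an occurrence of the reduced piece such as

     ... = p_1(D)->f;

   has its *EXPR replaced by the new parameter SYNTH.0, wrapped in a
   VIEW_CONVERT_EXPR when CONVERT is set and the types differ.  */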
4240 bool
4241 ipa_modify_expr (tree *expr, bool convert,
4242 ipa_parm_adjustment_vec adjustments)
4244 struct ipa_parm_adjustment *cand
4245 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4246 if (!cand)
4247 return false;
4249 tree src;
4250 if (cand->by_ref)
4251 src = build_simple_mem_ref (cand->new_decl);
4252 else
4253 src = cand->new_decl;
4255 if (dump_file && (dump_flags & TDF_DETAILS))
4257 fprintf (dump_file, "About to replace expr ");
4258 print_generic_expr (dump_file, *expr, 0);
4259 fprintf (dump_file, " with ");
4260 print_generic_expr (dump_file, src, 0);
4261 fprintf (dump_file, "\n");
4264 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4266 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4267 *expr = vce;
4269 else
4270 *expr = src;
4271 return true;
4274 /* If T is an SSA_NAME, return NULL if it is not a default def or
4275 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4276 the base variable is always returned, regardless if it is a default
4277 def. Return T if it is not an SSA_NAME. */
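/* E.g. for the default definition p_1(D) of parameter P this returns P,
   while for a non-default p_2 it returns NULL_TREE unless
   IGNORE_DEFAULT_DEF is set, in which case it returns P as well.  */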
4279 static tree
4280 get_ssa_base_param (tree t, bool ignore_default_def)
4282 if (TREE_CODE (t) == SSA_NAME)
4284 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4285 return SSA_NAME_VAR (t);
4286 else
4287 return NULL_TREE;
4289 return t;
4292 /* Given an expression, return an adjustment entry specifying the
4293 transformation to be done on EXPR. If no suitable adjustment entry
4294 is found, return NULL.
4296 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4297 default def; otherwise bail on them.
4299 If CONVERT is non-NULL, this function will set *CONVERT if the
4300 expression provided is a component reference. ADJUSTMENTS is the
4301 adjustments vector. */
4303 ipa_parm_adjustment *
4304 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4305 ipa_parm_adjustment_vec adjustments,
4306 bool ignore_default_def)
4308 if (TREE_CODE (**expr) == BIT_FIELD_REF
4309 || TREE_CODE (**expr) == IMAGPART_EXPR
4310 || TREE_CODE (**expr) == REALPART_EXPR)
4312 *expr = &TREE_OPERAND (**expr, 0);
4313 if (convert)
4314 *convert = true;
4317 HOST_WIDE_INT offset, size, max_size;
4318 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4319 if (!base || size == -1 || max_size == -1)
4320 return NULL;
4322 if (TREE_CODE (base) == MEM_REF)
4324 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4325 base = TREE_OPERAND (base, 0);
4328 base = get_ssa_base_param (base, ignore_default_def);
4329 if (!base || TREE_CODE (base) != PARM_DECL)
4330 return NULL;
4332 struct ipa_parm_adjustment *cand = NULL;
4333 unsigned int len = adjustments.length ();
4334 for (unsigned i = 0; i < len; i++)
4336 struct ipa_parm_adjustment *adj = &adjustments[i];
4338 if (adj->base == base
4339 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4341 cand = adj;
4342 break;
4346 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4347 return NULL;
4348 return cand;
4351 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4353 static bool
4354 index_in_adjustments_multiple_times_p (int base_index,
4355 ipa_parm_adjustment_vec adjustments)
4357 int i, len = adjustments.length ();
4358 bool one = false;
4360 for (i = 0; i < len; i++)
4362 struct ipa_parm_adjustment *adj;
4363 adj = &adjustments[i];
4365 if (adj->base_index == base_index)
4367 if (one)
4368 return true;
4369 else
4370 one = true;
4373 return false;
4377 /* Return adjustments that should have the same effect on function parameters
4378 and call arguments as if they were first changed according to adjustments in
4379 INNER and then by adjustments in OUTER. */
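/* A worked sketch: for an original f (a, b, c), if INNER removes b the
   intermediate clone is f' (a, c); if OUTER then removes its parameter
   with base_index 1 (i.e. c), the combined vector must describe the
   removal of both b and c relative to the original f, which is what the
   index translation below computes.  */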
4381 ipa_parm_adjustment_vec
4382 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4383 ipa_parm_adjustment_vec outer)
4385 int i, outlen = outer.length ();
4386 int inlen = inner.length ();
4387 int removals = 0;
4388 ipa_parm_adjustment_vec adjustments, tmp;
4390 tmp.create (inlen);
4391 for (i = 0; i < inlen; i++)
4393 struct ipa_parm_adjustment *n;
4394 n = &inner[i];
4396 if (n->op == IPA_PARM_OP_REMOVE)
4397 removals++;
4398 else
4400 /* FIXME: Handling of new arguments is not implemented yet. */
4401 gcc_assert (n->op != IPA_PARM_OP_NEW);
4402 tmp.quick_push (*n);
4406 adjustments.create (outlen + removals);
4407 for (i = 0; i < outlen; i++)
4409 struct ipa_parm_adjustment r;
4410 struct ipa_parm_adjustment *out = &outer[i];
4411 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4413 memset (&r, 0, sizeof (r));
4414 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4415 if (out->op == IPA_PARM_OP_REMOVE)
4417 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4419 r.op = IPA_PARM_OP_REMOVE;
4420 adjustments.quick_push (r);
4422 continue;
4424 else
4426 /* FIXME: Handling of new arguments is not implemented yet. */
4427 gcc_assert (out->op != IPA_PARM_OP_NEW);
4430 r.base_index = in->base_index;
4431 r.type = out->type;
4433 /* FIXME: Create nonlocal value too. */
4435 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4436 r.op = IPA_PARM_OP_COPY;
4437 else if (in->op == IPA_PARM_OP_COPY)
4438 r.offset = out->offset;
4439 else if (out->op == IPA_PARM_OP_COPY)
4440 r.offset = in->offset;
4441 else
4442 r.offset = in->offset + out->offset;
4443 adjustments.quick_push (r);
4446 for (i = 0; i < inlen; i++)
4448 struct ipa_parm_adjustment *n = &inner[i];
4450 if (n->op == IPA_PARM_OP_REMOVE)
4451 adjustments.quick_push (*n);
4454 tmp.release ();
4455 return adjustments;
4458 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4459 human-friendly way, assuming they are meant to be applied to FNDECL. */
4461 void
4462 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4463 tree fndecl)
4465 int i, len = adjustments.length ();
4466 bool first = true;
4467 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4469 fprintf (file, "IPA param adjustments: ");
4470 for (i = 0; i < len; i++)
4472 struct ipa_parm_adjustment *adj;
4473 adj = &adjustments[i];
4475 if (!first)
4476 fprintf (file, " ");
4477 else
4478 first = false;
4480 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4481 print_generic_expr (file, parms[adj->base_index], 0);
4482 if (adj->base)
4484 fprintf (file, ", base: ");
4485 print_generic_expr (file, adj->base, 0);
4487 if (adj->new_decl)
4489 fprintf (file, ", new_decl: ");
4490 print_generic_expr (file, adj->new_decl, 0);
4492 if (adj->new_ssa_base)
4494 fprintf (file, ", new_ssa_base: ");
4495 print_generic_expr (file, adj->new_ssa_base, 0);
4498 if (adj->op == IPA_PARM_OP_COPY)
4499 fprintf (file, ", copy_param");
4500 else if (adj->op == IPA_PARM_OP_REMOVE)
4501 fprintf (file, ", remove_param");
4502 else
4503 fprintf (file, ", offset %li", (long) adj->offset);
4504 if (adj->by_ref)
4505 fprintf (file, ", by_ref");
4506 print_node_brief (file, ", type: ", adj->type, 0);
4507 fprintf (file, "\n");
4509 parms.release ();
4512 /* Dump the linked list of aggregate replacement values AV to F. */
4514 void
4515 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4517 bool comma = false;
4518 fprintf (f, " Aggregate replacements:");
4519 for (; av; av = av->next)
4521 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4522 av->index, av->offset);
4523 print_generic_expr (f, av->value, 0);
4524 comma = true;
4526 fprintf (f, "\n");
4529 /* Stream out jump function JUMP_FUNC to OB. */
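/* The stream layout produced below is: the jump function type as a uhwi,
   a type-specific payload of trees and bitpacks, the number of aggregate
   items as a uhwi, a by_ref bitpack when that count is nonzero, and then
   each item's offset and value.  ipa_read_jump_function must mirror this
   order exactly.  */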
4531 static void
4532 ipa_write_jump_function (struct output_block *ob,
4533 struct ipa_jump_func *jump_func)
4535 struct ipa_agg_jf_item *item;
4536 struct bitpack_d bp;
4537 int i, count;
4539 streamer_write_uhwi (ob, jump_func->type);
4540 switch (jump_func->type)
4542 case IPA_JF_UNKNOWN:
4543 break;
4544 case IPA_JF_KNOWN_TYPE:
4545 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4546 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4547 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4548 break;
4549 case IPA_JF_CONST:
4550 gcc_assert (
4551 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4552 stream_write_tree (ob, jump_func->value.constant.value, true);
4553 break;
4554 case IPA_JF_PASS_THROUGH:
4555 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4556 if (jump_func->value.pass_through.operation == NOP_EXPR)
4558 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4559 bp = bitpack_create (ob->main_stream);
4560 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4561 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4562 streamer_write_bitpack (&bp);
4564 else
4566 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4567 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4569 break;
4570 case IPA_JF_ANCESTOR:
4571 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4572 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4573 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4574 bp = bitpack_create (ob->main_stream);
4575 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4576 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4577 streamer_write_bitpack (&bp);
4578 break;
4581 count = vec_safe_length (jump_func->agg.items);
4582 streamer_write_uhwi (ob, count);
4583 if (count)
4585 bp = bitpack_create (ob->main_stream);
4586 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4587 streamer_write_bitpack (&bp);
4590 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4592 streamer_write_uhwi (ob, item->offset);
4593 stream_write_tree (ob, item->value, true);
4597 /* Read in jump function JUMP_FUNC from IB. */
4599 static void
4600 ipa_read_jump_function (struct lto_input_block *ib,
4601 struct ipa_jump_func *jump_func,
4602 struct cgraph_edge *cs,
4603 struct data_in *data_in)
4605 enum jump_func_type jftype;
4606 enum tree_code operation;
4607 int i, count;
4609 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4610 switch (jftype)
4612 case IPA_JF_UNKNOWN:
4613 jump_func->type = IPA_JF_UNKNOWN;
4614 break;
4615 case IPA_JF_KNOWN_TYPE:
4617 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4618 tree base_type = stream_read_tree (ib, data_in);
4619 tree component_type = stream_read_tree (ib, data_in);
4621 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4622 break;
4624 case IPA_JF_CONST:
4625 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4626 break;
4627 case IPA_JF_PASS_THROUGH:
4628 operation = (enum tree_code) streamer_read_uhwi (ib);
4629 if (operation == NOP_EXPR)
4631 int formal_id = streamer_read_uhwi (ib);
4632 struct bitpack_d bp = streamer_read_bitpack (ib);
4633 bool agg_preserved = bp_unpack_value (&bp, 1);
4634 bool type_preserved = bp_unpack_value (&bp, 1);
4635 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4636 type_preserved);
4638 else
4640 tree operand = stream_read_tree (ib, data_in);
4641 int formal_id = streamer_read_uhwi (ib);
4642 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4643 operation);
4645 break;
4646 case IPA_JF_ANCESTOR:
4648 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4649 tree type = stream_read_tree (ib, data_in);
4650 int formal_id = streamer_read_uhwi (ib);
4651 struct bitpack_d bp = streamer_read_bitpack (ib);
4652 bool agg_preserved = bp_unpack_value (&bp, 1);
4653 bool type_preserved = bp_unpack_value (&bp, 1);
4655 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4656 type_preserved);
4657 break;
4661 count = streamer_read_uhwi (ib);
4662 vec_alloc (jump_func->agg.items, count);
4663 if (count)
4665 struct bitpack_d bp = streamer_read_bitpack (ib);
4666 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4668 for (i = 0; i < count; i++)
4670 struct ipa_agg_jf_item item;
4671 item.offset = streamer_read_uhwi (ib);
4672 item.value = stream_read_tree (ib, data_in);
4673 jump_func->agg.items->quick_push (item);
4677 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4678 relevant to indirect inlining to OB. */
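/* Layout sketch: param_index and offset as hwis, a bitpack of the seven
   flags written below, and, for polymorphic calls only, the OTR token and
   the type trees; ipa_read_indirect_edge_info unpacks the same order.  */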
4680 static void
4681 ipa_write_indirect_edge_info (struct output_block *ob,
4682 struct cgraph_edge *cs)
4684 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4685 struct bitpack_d bp;
4687 streamer_write_hwi (ob, ii->param_index);
4688 streamer_write_hwi (ob, ii->offset);
4689 bp = bitpack_create (ob->main_stream);
4690 bp_pack_value (&bp, ii->polymorphic, 1);
4691 bp_pack_value (&bp, ii->agg_contents, 1);
4692 bp_pack_value (&bp, ii->member_ptr, 1);
4693 bp_pack_value (&bp, ii->by_ref, 1);
4694 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4695 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4696 bp_pack_value (&bp, ii->speculative_maybe_derived_type, 1);
4697 streamer_write_bitpack (&bp);
4699 if (ii->polymorphic)
4701 streamer_write_hwi (ob, ii->otr_token);
4702 stream_write_tree (ob, ii->otr_type, true);
4703 stream_write_tree (ob, ii->outer_type, true);
4704 stream_write_tree (ob, ii->speculative_outer_type, true);
4705 if (ii->speculative_outer_type)
4706 streamer_write_hwi (ob, ii->speculative_offset);
4710 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4711 relevant to indirect inlining from IB. */
4713 static void
4714 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4715 struct data_in *data_in ATTRIBUTE_UNUSED,
4716 struct cgraph_edge *cs)
4718 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4719 struct bitpack_d bp;
4721 ii->param_index = (int) streamer_read_hwi (ib);
4722 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4723 bp = streamer_read_bitpack (ib);
4724 ii->polymorphic = bp_unpack_value (&bp, 1);
4725 ii->agg_contents = bp_unpack_value (&bp, 1);
4726 ii->member_ptr = bp_unpack_value (&bp, 1);
4727 ii->by_ref = bp_unpack_value (&bp, 1);
4728 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4729 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4730 ii->speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
4731 if (ii->polymorphic)
4733 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4734 ii->otr_type = stream_read_tree (ib, data_in);
4735 ii->outer_type = stream_read_tree (ib, data_in);
4736 ii->speculative_outer_type = stream_read_tree (ib, data_in);
4737 if (ii->speculative_outer_type)
4738 ii->speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4742 /* Stream out NODE info to OB. */
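/* Layout sketch: the encoded node reference, the parameter count, one
   move cost per parameter, a bitpack of per-parameter used flags, one
   controlled-uses count per parameter, and then, for every callee and
   indirect edge in order, the argument count followed by the jump
   functions (plus, for indirect edges, the indirect call info);
   ipa_read_node_info reads the same order.  */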
4744 static void
4745 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4747 int node_ref;
4748 lto_symtab_encoder_t encoder;
4749 struct ipa_node_params *info = IPA_NODE_REF (node);
4750 int j;
4751 struct cgraph_edge *e;
4752 struct bitpack_d bp;
4754 encoder = ob->decl_state->symtab_node_encoder;
4755 node_ref = lto_symtab_encoder_encode (encoder, node);
4756 streamer_write_uhwi (ob, node_ref);
4758 streamer_write_uhwi (ob, ipa_get_param_count (info));
4759 for (j = 0; j < ipa_get_param_count (info); j++)
4760 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4761 bp = bitpack_create (ob->main_stream);
4762 gcc_assert (info->analysis_done
4763 || ipa_get_param_count (info) == 0);
4764 gcc_assert (!info->node_enqueued);
4765 gcc_assert (!info->ipcp_orig_node);
4766 for (j = 0; j < ipa_get_param_count (info); j++)
4767 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4768 streamer_write_bitpack (&bp);
4769 for (j = 0; j < ipa_get_param_count (info); j++)
4770 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4771 for (e = node->callees; e; e = e->next_callee)
4773 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4775 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4776 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4777 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4779 for (e = node->indirect_calls; e; e = e->next_callee)
4781 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4783 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4784 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4785 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4786 ipa_write_indirect_edge_info (ob, e);
4790 /* Stream in NODE info from IB. */
4792 static void
4793 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4794 struct data_in *data_in)
4796 struct ipa_node_params *info = IPA_NODE_REF (node);
4797 int k;
4798 struct cgraph_edge *e;
4799 struct bitpack_d bp;
4801 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4803 for (k = 0; k < ipa_get_param_count (info); k++)
4804 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4806 bp = streamer_read_bitpack (ib);
4807 if (ipa_get_param_count (info) != 0)
4808 info->analysis_done = true;
4809 info->node_enqueued = false;
4810 for (k = 0; k < ipa_get_param_count (info); k++)
4811 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4812 for (k = 0; k < ipa_get_param_count (info); k++)
4813 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4814 for (e = node->callees; e; e = e->next_callee)
4816 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4817 int count = streamer_read_uhwi (ib);
4819 if (!count)
4820 continue;
4821 vec_safe_grow_cleared (args->jump_functions, count);
4823 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4824 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4825 data_in);
4827 for (e = node->indirect_calls; e; e = e->next_callee)
4829 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4830 int count = streamer_read_uhwi (ib);
4832 if (count)
4834 vec_safe_grow_cleared (args->jump_functions, count);
4835 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4836 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4837 data_in);
4839 ipa_read_indirect_edge_info (ib, data_in, e);
4843 /* Write jump functions for all analyzed functions in the current partition. */
4845 void
4846 ipa_prop_write_jump_functions (void)
4848 struct cgraph_node *node;
4849 struct output_block *ob;
4850 unsigned int count = 0;
4851 lto_symtab_encoder_iterator lsei;
4852 lto_symtab_encoder_t encoder;
4855 if (!ipa_node_params_vector.exists ())
4856 return;
4858 ob = create_output_block (LTO_section_jump_functions);
4859 encoder = ob->decl_state->symtab_node_encoder;
4860 ob->symbol = NULL;
4861 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4862 lsei_next_function_in_partition (&lsei))
4864 node = lsei_cgraph_node (lsei);
4865 if (node->has_gimple_body_p ()
4866 && IPA_NODE_REF (node) != NULL)
4867 count++;
4870 streamer_write_uhwi (ob, count);
4872 /* Process all of the functions. */
4873 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4874 lsei_next_function_in_partition (&lsei))
4876 node = lsei_cgraph_node (lsei);
4877 if (node->has_gimple_body_p ()
4878 && IPA_NODE_REF (node) != NULL)
4879 ipa_write_node_info (ob, node);
4881 streamer_write_char_stream (ob->main_stream, 0);
4882 produce_asm (ob, NULL);
4883 destroy_output_block (ob);
4886 /* Read section in file FILE_DATA of length LEN with data DATA. */
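/* The section data is laid out as: an lto_function_header, cfg data of
   header->cfg_size bytes (unused here), the main stream of
   header->main_size bytes, and finally the string table; the offsets
   computed below follow directly from that.  */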
4888 static void
4889 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4890 size_t len)
4892 const struct lto_function_header *header =
4893 (const struct lto_function_header *) data;
4894 const int cfg_offset = sizeof (struct lto_function_header);
4895 const int main_offset = cfg_offset + header->cfg_size;
4896 const int string_offset = main_offset + header->main_size;
4897 struct data_in *data_in;
4898 struct lto_input_block ib_main;
4899 unsigned int i;
4900 unsigned int count;
4902 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4903 header->main_size);
4905 data_in =
4906 lto_data_in_create (file_data, (const char *) data + string_offset,
4907 header->string_size, vNULL);
4908 count = streamer_read_uhwi (&ib_main);
4910 for (i = 0; i < count; i++)
4912 unsigned int index;
4913 struct cgraph_node *node;
4914 lto_symtab_encoder_t encoder;
4916 index = streamer_read_uhwi (&ib_main);
4917 encoder = file_data->symtab_node_encoder;
4918 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4919 index));
4920 gcc_assert (node->definition);
4921 ipa_read_node_info (&ib_main, node, data_in);
4923 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4924 len);
4925 lto_data_in_delete (data_in);
4928 /* Read ipcp jump functions. */
4930 void
4931 ipa_prop_read_jump_functions (void)
4933 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4934 struct lto_file_decl_data *file_data;
4935 unsigned int j = 0;
4937 ipa_check_create_node_params ();
4938 ipa_check_create_edge_args ();
4939 ipa_register_cgraph_hooks ();
4941 while ((file_data = file_data_vec[j++]))
4943 size_t len;
4944 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4946 if (data)
4947 ipa_prop_read_section (file_data, data, len);
4951 /* After merging units, we can get mismatches in argument counts, and decl
4952 merging might have rendered parameter lists obsolete. Make sure the
4953 descriptor vectors for node params and edge args exist. */
4955 void
4956 ipa_update_after_lto_read (void)
4958 ipa_check_create_node_params ();
4959 ipa_check_create_edge_args ();
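/* Write the aggregate value replacement chain for NODE to OB.  */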
4962 void
4963 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4965 int node_ref;
4966 unsigned int count = 0;
4967 lto_symtab_encoder_t encoder;
4968 struct ipa_agg_replacement_value *aggvals, *av;
4970 aggvals = ipa_get_agg_replacements_for_node (node);
4971 encoder = ob->decl_state->symtab_node_encoder;
4972 node_ref = lto_symtab_encoder_encode (encoder, node);
4973 streamer_write_uhwi (ob, node_ref);
4975 for (av = aggvals; av; av = av->next)
4976 count++;
4977 streamer_write_uhwi (ob, count);
4979 for (av = aggvals; av; av = av->next)
4981 struct bitpack_d bp;
4983 streamer_write_uhwi (ob, av->offset);
4984 streamer_write_uhwi (ob, av->index);
4985 stream_write_tree (ob, av->value, true);
4987 bp = bitpack_create (ob->main_stream);
4988 bp_pack_value (&bp, av->by_ref, 1);
4989 streamer_write_bitpack (&bp);
4993 /* Stream in the aggregate value replacement chain for NODE from IB. */
4995 static void
4996 read_agg_replacement_chain (struct lto_input_block *ib,
4997 struct cgraph_node *node,
4998 struct data_in *data_in)
5000 struct ipa_agg_replacement_value *aggvals = NULL;
5001 unsigned int count, i;
5003 count = streamer_read_uhwi (ib);
5004 for (i = 0; i < count; i++)
5006 struct ipa_agg_replacement_value *av;
5007 struct bitpack_d bp;
5009 av = ggc_alloc<ipa_agg_replacement_value> ();
5010 av->offset = streamer_read_uhwi (ib);
5011 av->index = streamer_read_uhwi (ib);
5012 av->value = stream_read_tree (ib, data_in);
5013 bp = streamer_read_bitpack (ib);
5014 av->by_ref = bp_unpack_value (&bp, 1);
5015 av->next = aggvals;
5016 aggvals = av;
5018 ipa_set_node_agg_value_chain (node, aggvals);
5021 /* Write all aggregate replacements for nodes in the current partition. */
5023 void
5024 ipa_prop_write_all_agg_replacement (void)
5026 struct cgraph_node *node;
5027 struct output_block *ob;
5028 unsigned int count = 0;
5029 lto_symtab_encoder_iterator lsei;
5030 lto_symtab_encoder_t encoder;
5032 if (!ipa_node_agg_replacements)
5033 return;
5035 ob = create_output_block (LTO_section_ipcp_transform);
5036 encoder = ob->decl_state->symtab_node_encoder;
5037 ob->symbol = NULL;
5038 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5039 lsei_next_function_in_partition (&lsei))
5041 node = lsei_cgraph_node (lsei);
5042 if (node->has_gimple_body_p ()
5043 && ipa_get_agg_replacements_for_node (node) != NULL)
5044 count++;
5047 streamer_write_uhwi (ob, count);
5049 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5050 lsei_next_function_in_partition (&lsei))
5052 node = lsei_cgraph_node (lsei);
5053 if (node->has_gimple_body_p ()
5054 && ipa_get_agg_replacements_for_node (node) != NULL)
5055 write_agg_replacement_chain (ob, node);
5057 streamer_write_char_stream (ob->main_stream, 0);
5058 produce_asm (ob, NULL);
5059 destroy_output_block (ob);
5062 /* Read replacements section in file FILE_DATA of length LEN with data
5063 DATA. */
5065 static void
5066 read_replacements_section (struct lto_file_decl_data *file_data,
5067 const char *data,
5068 size_t len)
5070 const struct lto_function_header *header =
5071 (const struct lto_function_header *) data;
5072 const int cfg_offset = sizeof (struct lto_function_header);
5073 const int main_offset = cfg_offset + header->cfg_size;
5074 const int string_offset = main_offset + header->main_size;
5075 struct data_in *data_in;
5076 struct lto_input_block ib_main;
5077 unsigned int i;
5078 unsigned int count;
5080 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
5081 header->main_size);
5083 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5084 header->string_size, vNULL);
5085 count = streamer_read_uhwi (&ib_main);
5087 for (i = 0; i < count; i++)
5089 unsigned int index;
5090 struct cgraph_node *node;
5091 lto_symtab_encoder_t encoder;
5093 index = streamer_read_uhwi (&ib_main);
5094 encoder = file_data->symtab_node_encoder;
5095 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5096 index));
5097 gcc_assert (node->definition);
5098 read_agg_replacement_chain (&ib_main, node, data_in);
5100 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5101 len);
5102 lto_data_in_delete (data_in);
5105 /* Read IPA-CP aggregate replacements. */
5107 void
5108 ipa_prop_read_all_agg_replacement (void)
5110 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5111 struct lto_file_decl_data *file_data;
5112 unsigned int j = 0;
5114 while ((file_data = file_data_vec[j++]))
5116 size_t len;
5117 const char *data = lto_get_section_data (file_data,
5118 LTO_section_ipcp_transform,
5119 NULL, &len);
5120 if (data)
5121 read_replacements_section (file_data, data, len);
5125 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5126 NODE. */
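/* A worked sketch: if the clone skips original argument 1 out of four,
   the translation computed below is adj = {0, -1, 1, 2}, so a replacement
   for original index 2 is re-indexed to 1 in the clone.  */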
5128 static void
5129 adjust_agg_replacement_values (struct cgraph_node *node,
5130 struct ipa_agg_replacement_value *aggval)
5132 struct ipa_agg_replacement_value *v;
5133 int i, c = 0, d = 0, *adj;
5135 if (!node->clone.combined_args_to_skip)
5136 return;
5138 for (v = aggval; v; v = v->next)
5140 gcc_assert (v->index >= 0);
5141 if (c < v->index)
5142 c = v->index;
5144 c++;
5146 adj = XALLOCAVEC (int, c);
5147 for (i = 0; i < c; i++)
5148 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5150 adj[i] = -1;
5151 d++;
5153 else
5154 adj[i] = i - d;
5156 for (v = aggval; v; v = v->next)
5157 v->index = adj[v->index];
5160 /* Dominator walker driving the ipcp modification phase. */
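/* E.g. (a sketch): if IPA-CP has recorded that, in this clone, the memory
   pointed to by parameter p always holds 42 at offset 0, the walker
   rewrites a load

     x_1 = p_2(D)->f;

   into

     x_1 = 42;

   provided the sizes match and the types are convertible.  */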
5162 class ipcp_modif_dom_walker : public dom_walker
5164 public:
5165 ipcp_modif_dom_walker (struct func_body_info *fbi,
5166 vec<ipa_param_descriptor> descs,
5167 struct ipa_agg_replacement_value *av,
5168 bool *sc, bool *cc)
5169 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5170 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5172 virtual void before_dom_children (basic_block);
5174 private:
5175 struct func_body_info *m_fbi;
5176 vec<ipa_param_descriptor> m_descriptors;
5177 struct ipa_agg_replacement_value *m_aggval;
5178 bool *m_something_changed, *m_cfg_changed;
5181 void
5182 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5184 gimple_stmt_iterator gsi;
5185 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5187 struct ipa_agg_replacement_value *v;
5188 gimple stmt = gsi_stmt (gsi);
5189 tree rhs, val, t;
5190 HOST_WIDE_INT offset, size;
5191 int index;
5192 bool by_ref, vce;
5194 if (!gimple_assign_load_p (stmt))
5195 continue;
5196 rhs = gimple_assign_rhs1 (stmt);
5197 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5198 continue;
5200 vce = false;
5201 t = rhs;
5202 while (handled_component_p (t))
5204 /* V_C_E can do things like convert an array of integers into one
5205 bigger integer and similar things that we do not handle below. */
5206 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5208 vce = true;
5209 break;
5211 t = TREE_OPERAND (t, 0);
5213 if (vce)
5214 continue;
5216 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5217 &offset, &size, &by_ref))
5218 continue;
5219 for (v = m_aggval; v; v = v->next)
5220 if (v->index == index
5221 && v->offset == offset)
5222 break;
5223 if (!v
5224 || v->by_ref != by_ref
5225 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5226 continue;
5228 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5229 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5231 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5232 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5233 else if (TYPE_SIZE (TREE_TYPE (rhs))
5234 == TYPE_SIZE (TREE_TYPE (v->value)))
5235 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5236 else
5238 if (dump_file)
5240 fprintf (dump_file, " const ");
5241 print_generic_expr (dump_file, v->value, 0);
5242 fprintf (dump_file, " can't be converted to type of ");
5243 print_generic_expr (dump_file, rhs, 0);
5244 fprintf (dump_file, "\n");
5246 continue;
5249 else
5250 val = v->value;
5252 if (dump_file && (dump_flags & TDF_DETAILS))
5254 fprintf (dump_file, "Modifying stmt:\n ");
5255 print_gimple_stmt (dump_file, stmt, 0, 0);
5257 gimple_assign_set_rhs_from_tree (&gsi, val);
5258 update_stmt (stmt);
5260 if (dump_file && (dump_flags & TDF_DETAILS))
5262 fprintf (dump_file, "into:\n ");
5263 print_gimple_stmt (dump_file, stmt, 0, 0);
5264 fprintf (dump_file, "\n");
5267 *m_something_changed = true;
5268 if (maybe_clean_eh_stmt (stmt)
5269 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5270 *m_cfg_changed = true;
5275 /* IPCP transformation phase applying the aggregate value replacements to NODE. */
5277 unsigned int
5278 ipcp_transform_function (struct cgraph_node *node)
5280 vec<ipa_param_descriptor> descriptors = vNULL;
5281 struct func_body_info fbi;
5282 struct ipa_agg_replacement_value *aggval;
5283 int param_count;
5284 bool cfg_changed = false, something_changed = false;
5286 gcc_checking_assert (cfun);
5287 gcc_checking_assert (current_function_decl);
5289 if (dump_file)
5290 fprintf (dump_file, "Modification phase of node %s/%i\n",
5291 node->name (), node->order);
5293 aggval = ipa_get_agg_replacements_for_node (node);
5294 if (!aggval)
5295 return 0;
5296 param_count = count_formal_params (node->decl);
5297 if (param_count == 0)
5298 return 0;
5299 adjust_agg_replacement_values (node, aggval);
5300 if (dump_file)
5301 ipa_dump_agg_replacement_values (dump_file, aggval);
5303 fbi.node = node;
5304 fbi.info = NULL;
5305 fbi.bb_infos = vNULL;
5306 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5307 fbi.param_count = param_count;
5308 fbi.aa_walked = 0;
5310 descriptors.safe_grow_cleared (param_count);
5311 ipa_populate_param_decls (node, descriptors);
5312 calculate_dominance_info (CDI_DOMINATORS);
5313 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5314 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5316 int i;
5317 struct ipa_bb_info *bi;
5318 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5319 free_ipa_bb_info (bi);
5320 fbi.bb_infos.release ();
5321 free_dominance_info (CDI_DOMINATORS);
5322 (*ipa_node_agg_replacements)[node->uid] = NULL;
5323 descriptors.release ();
5325 if (!something_changed)
5326 return 0;
5327 else if (cfg_changed)
5328 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5329 else
5330 return TODO_update_ssa_only_virtuals;