/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the argument infos of call graph edges are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

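/* For illustration: a function compiled with a per-function override such as

     __attribute__ ((optimize ("O0"))) void quux (void);

   carries a DECL_FUNCTION_SPECIFIC_OPTIMIZATION node whose x_optimize is
   zero, so the predicate above returns true and IPA-CP leaves the function
   alone.  (Illustrative example, not part of the original file.)  */
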
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note that the underlying descriptors array is built just once, by
   ipa_initialize_node_params; there is no setter function.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}

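/* A dump produced by the function above might look roughly like this
   (hypothetical output; exact spacing is determined by the fprintf calls):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
       param 1: CONST: 4
       param 2: UNKNOWN
         Aggregate passed by reference:
           offset: 0, cst: 1  */
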
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  /* Recording and propagating main variants increases the chance that types
     will match.  */
  base_type = TYPE_MAIN_VARIANT (base_type);
  component_type = TYPE_MAIN_VARIANT (component_type);

  gcc_assert (contains_polymorphic_type_p (base_type)
	      && contains_polymorphic_type_p (component_type));
  if (!flag_devirtualize)
    return;
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

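/* To make the constructor assumptions above concrete, consider this
   illustrative C++ hierarchy (not part of the original file):

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   B::B () first calls A::A () (section 1 of the comment above), then stores
   the address of B's vtable into the VMT pointer (section 2), and only then
   runs member initializers and user-written code (section 3).  */
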
/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL;

  return BINFO_TYPE (binfo);
}

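/* For illustration, a VMT store matched by the function above typically
   looks like this in gimple (hypothetical SSA names):

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   The LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field and the RHS is
   mapped back to a BINFO by vtable_pointer_value_to_binfo.  */
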
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the invocation of CALL.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as special cases, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we have no easy
     way to tie this down, so punt on all non-pure cdtors).
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}

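/* E.g. in the illustrative snippet

     void foo (A *a) { a->f (); }

   the dynamic type of *a cannot change before the call because foo is
   neither a cdtor nor does its inline stack contain one, so the function
   above returns false; inside A::A (), on the other hand, the VMT pointer
   of *this is by definition being set up and it returns true.  */
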
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

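/* Illustrative sketch of the memoization above: in a dominator tree like

     BB2  (valid status for param 0, parm_modified = false)
      |
     BB5  (no status yet)

   the query for BB5 walks up to BB2, finds the valid status there and
   starts from a copy of it instead of redoing the alias walk from
   scratch.  */
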
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

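/* For example (hypothetical gimple), in

     foo (int a)
     {
       a.0_2 = a;
       bar ();
       a.0_3 = a;

   the first load is trivially preserved before the call, while whether the
   second one is preserved depends on the walk over the vdefs of bar () not
   finding a statement that may clobber A.  */
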
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describe the parameters of the current function and FBI the
   gathered intraprocedural information (the latter can be NULL), STMT is the
   load statement.  If function returns true, *INDEX_P, *OFFSET_P and *BY_REF
   is filled with the parameter index, offset within the aggregate and whether
   it is a load from a value passed by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	 <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed at
   different stages of IPA optimizations.  FBI contains the information that
   is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
	  || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

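/* Under the Itanium C++ ABI, a pointer to member function is laid out
   roughly like this illustrative struct:

     struct
     {
       void (*__pfn) ();
       ptrdiff_t __delta;
     };

   i.e. a method pointer followed by an integral this-adjustment, which is
   exactly the two-field shape the function above tests for.  */
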
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

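/* E.g. given the illustrative copy chain

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-rhs assignments and
   returns a_1(D), which is a default definition and stops the walk.  */
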
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

1672 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1673 in ARG is filled in with constant values. ARG can either be an aggregate
1674 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1675 aggregate. JFUNC is the jump function into which the constants are
1676 subsequently stored. */
1678 static void
1679 determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1680 struct ipa_jump_func *jfunc)
1682 struct ipa_known_agg_contents_list *list = NULL;
1683 int item_count = 0, const_count = 0;
1684 HOST_WIDE_INT arg_offset, arg_size;
1685 gimple_stmt_iterator gsi;
1686 tree arg_base;
1687 bool check_ref, by_ref;
1688 ao_ref r;
1690 /* The function operates in three stages. First, we prepare check_ref, r,
1691 arg_base and arg_offset based on what is actually passed as an actual
1692 argument. */
1694 if (POINTER_TYPE_P (arg_type))
1696 by_ref = true;
1697 if (TREE_CODE (arg) == SSA_NAME)
1699 tree type_size;
1700 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1701 return;
1702 check_ref = true;
1703 arg_base = arg;
1704 arg_offset = 0;
1705 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1706 arg_size = tree_to_uhwi (type_size);
1707 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1709 else if (TREE_CODE (arg) == ADDR_EXPR)
1711 HOST_WIDE_INT arg_max_size;
1713 arg = TREE_OPERAND (arg, 0);
1714 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1715 &arg_max_size);
1716 if (arg_max_size == -1
1717 || arg_max_size != arg_size
1718 || arg_offset < 0)
1719 return;
1720 if (DECL_P (arg_base))
1722 check_ref = false;
1723 ao_ref_init (&r, arg_base);
1725 else
1726 return;
1728 else
1729 return;
1731 else
1733 HOST_WIDE_INT arg_max_size;
1735 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1737 by_ref = false;
1738 check_ref = false;
1739 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1740 &arg_max_size);
1741 if (arg_max_size == -1
1742 || arg_max_size != arg_size
1743 || arg_offset < 0)
1744 return;
1746 ao_ref_init (&r, arg);
1749 /* Second stage walks back the BB, looks at individual statements and as long
1750 as it is confident of how the statements affect contents of the
1751 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1752 describing it. */
1753 gsi = gsi_for_stmt (call);
1754 gsi_prev (&gsi);
1755 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1757 struct ipa_known_agg_contents_list *n, **p;
1758 gimple stmt = gsi_stmt (gsi);
1759 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1760 tree lhs, rhs, lhs_base;
1762 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1763 continue;
1764 if (!gimple_assign_single_p (stmt))
1765 break;
1767 lhs = gimple_assign_lhs (stmt);
1768 rhs = gimple_assign_rhs1 (stmt);
1769 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1770 || TREE_CODE (lhs) == BIT_FIELD_REF
1771 || contains_bitfld_component_ref_p (lhs))
1772 break;
1774 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1775 &lhs_max_size);
1776 if (lhs_max_size == -1
1777 || lhs_max_size != lhs_size)
1778 break;
1780 if (check_ref)
1782 if (TREE_CODE (lhs_base) != MEM_REF
1783 || TREE_OPERAND (lhs_base, 0) != arg_base
1784 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1785 break;
1787 else if (lhs_base != arg_base)
1789 if (DECL_P (lhs_base))
1790 continue;
1791 else
1792 break;
1795 bool already_there = false;
1796 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1797 &already_there);
1798 if (!p)
1799 break;
1800 if (already_there)
1801 continue;
1803 rhs = get_ssa_def_if_simple_copy (rhs);
1804 n = XALLOCA (struct ipa_known_agg_contents_list);
1805 n->size = lhs_size;
1806 n->offset = lhs_offset;
1807 if (is_gimple_ip_invariant (rhs))
1809 n->constant = rhs;
1810 const_count++;
1812 else
1813 n->constant = NULL_TREE;
1814 n->next = *p;
1815 *p = n;
1817 item_count++;
1818 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1819 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1820 break;
1823 /* Third stage just goes over the list and creates an appropriate vector of
1824 ipa_agg_jf_item structures out of it, of course only if there are
1825 any known constants to begin with. */
1827 if (const_count)
1829 jfunc->agg.by_ref = by_ref;
1830 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
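/* Return the type of the I-th formal parameter of the callee reached through
   call graph edge E, taken from the function type or, failing that, from
   DECL_ARGUMENTS of the callee; return NULL if it cannot be determined.  */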
1834 static tree
1835 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1837 int n;
1838 tree type = (e->callee
1839 ? TREE_TYPE (e->callee->decl)
1840 : gimple_call_fntype (e->call_stmt));
1841 tree t = TYPE_ARG_TYPES (type);
1843 for (n = 0; n < i; n++)
1845 if (!t)
1846 break;
1847 t = TREE_CHAIN (t);
1849 if (t)
1850 return TREE_VALUE (t);
1851 if (!e->callee)
1852 return NULL;
1853 t = DECL_ARGUMENTS (e->callee->decl);
1854 for (n = 0; n < i; n++)
1856 if (!t)
1857 return NULL;
1858 t = TREE_CHAIN (t);
1860 if (t)
1861 return TREE_TYPE (t);
1862 return NULL;
1865 /* Compute jump function for all arguments of callsite CS and insert the
1866 information in the jump_functions array in the ipa_edge_args corresponding
1867 to this callsite. */
1869 static void
1870 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1871 struct cgraph_edge *cs)
1873 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1874 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1875 gimple call = cs->call_stmt;
1876 int n, arg_num = gimple_call_num_args (call);
1878 if (arg_num == 0 || args->jump_functions)
1879 return;
1880 vec_safe_grow_cleared (args->jump_functions, arg_num);
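/* Calls to GIMPLE internal functions have no callee declaration to analyze;
   leave the freshly cleared jump functions as unknown.  */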
1882 if (gimple_call_internal_p (call))
1883 return;
1884 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1885 return;
1887 for (n = 0; n < arg_num; n++)
1889 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1890 tree arg = gimple_call_arg (call, n);
1891 tree param_type = ipa_get_callee_param_type (cs, n);
1893 if (is_gimple_ip_invariant (arg))
1894 ipa_set_jf_constant (jfunc, arg, cs);
1895 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1896 && TREE_CODE (arg) == PARM_DECL)
1898 int index = ipa_get_param_decl_index (info, arg);
1900 gcc_assert (index >= 0);
1901 /* Aggregate passed by value, check for pass-through, otherwise we
1902 will attempt to fill in aggregate contents later in this
1903 for loop. */
1904 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1906 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
1907 continue;
1910 else if (TREE_CODE (arg) == SSA_NAME)
1912 if (SSA_NAME_IS_DEFAULT_DEF (arg))
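/* A default definition is the value the parameter had on function entry.  */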
1914 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1915 if (index >= 0)
1917 bool agg_p, type_p;
1918 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1919 if (param_type && POINTER_TYPE_P (param_type))
1920 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1921 call, jfunc);
1922 else
1923 type_p = false;
1924 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1925 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1926 type_p);
1929 else
1931 gimple stmt = SSA_NAME_DEF_STMT (arg);
1932 if (is_gimple_assign (stmt))
1933 compute_complex_assign_jump_func (fbi, info, jfunc,
1934 call, stmt, arg, param_type);
1935 else if (gimple_code (stmt) == GIMPLE_PHI)
1936 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1937 call, stmt, param_type);
1940 else
1941 compute_known_type_jump_func (arg, jfunc, call,
1942 param_type
1943 && POINTER_TYPE_P (param_type)
1944 ? TREE_TYPE (param_type)
1945 : NULL);
1947 /* If ARG is a pointer, we cannot use its type to determine the type of the
1948 aggregate passed (because type conversions are ignored in gimple). Usually
1949 we can safely get the type from the function declaration, but in case of
1950 K&R prototypes or variadic functions we can try our luck with the type of
1951 the pointer passed. TODO: Since we look for actual initialization of the
1952 memory object, we may be better off working out the type from the memory stores we find. */
1953 if (!param_type)
1954 param_type = TREE_TYPE (arg);
1956 if ((jfunc->type != IPA_JF_PASS_THROUGH
1957 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1958 && (jfunc->type != IPA_JF_ANCESTOR
1959 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1960 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1961 || POINTER_TYPE_P (param_type)))
1962 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1966 /* Compute jump functions for all edges - both direct and indirect - outgoing
1967 from BB. */
1969 static void
1970 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1972 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1973 int i;
1974 struct cgraph_edge *cs;
1976 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1978 struct cgraph_node *callee = cs->callee;
1980 if (callee)
1982 callee->ultimate_alias_target ();
1983 /* We do not need to bother analyzing calls to unknown functions
1984 unless they may become known during lto/whopr. */
1985 if (!callee->definition && !flag_lto)
1986 continue;
1988 ipa_compute_jump_functions_for_edge (fbi, cs);
1992 /* If STMT looks like a statement loading a value from a member pointer formal
1993 parameter, return that parameter and store the offset of the field to
1994 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1995 might be clobbered). If USE_DELTA, then we look for a use of the delta
1996 field rather than the pfn. */
1998 static tree
1999 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
2000 HOST_WIDE_INT *offset_p)
2002 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2004 if (!gimple_assign_single_p (stmt))
2005 return NULL_TREE;
2007 rhs = gimple_assign_rhs1 (stmt);
2008 if (TREE_CODE (rhs) == COMPONENT_REF)
2010 ref_field = TREE_OPERAND (rhs, 1);
2011 rhs = TREE_OPERAND (rhs, 0);
2013 else
2014 ref_field = NULL_TREE;
2015 if (TREE_CODE (rhs) != MEM_REF)
2016 return NULL_TREE;
2017 rec = TREE_OPERAND (rhs, 0);
2018 if (TREE_CODE (rec) != ADDR_EXPR)
2019 return NULL_TREE;
2020 rec = TREE_OPERAND (rec, 0);
2021 if (TREE_CODE (rec) != PARM_DECL
2022 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2023 return NULL_TREE;
2024 ref_offset = TREE_OPERAND (rhs, 1);
2026 if (use_delta)
2027 fld = delta_field;
2028 else
2029 fld = ptr_field;
2030 if (offset_p)
2031 *offset_p = int_bit_position (fld);
2033 if (ref_field)
2035 if (integer_nonzerop (ref_offset))
2036 return NULL_TREE;
2037 return ref_field == fld ? rec : NULL_TREE;
2039 else
2040 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2041 : NULL_TREE;
2044 /* Returns true iff T is an SSA_NAME defined by a statement. */
2046 static bool
2047 ipa_is_ssa_with_stmt_def (tree t)
2049 if (TREE_CODE (t) == SSA_NAME
2050 && !SSA_NAME_IS_DEFAULT_DEF (t))
2051 return true;
2052 else
2053 return false;
2056 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2057 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2058 indirect call graph edge. */
2060 static struct cgraph_edge *
2061 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
2063 struct cgraph_edge *cs;
2065 cs = node->get_edge (stmt);
2066 cs->indirect_info->param_index = param_index;
2067 cs->indirect_info->agg_contents = 0;
2068 cs->indirect_info->member_ptr = 0;
2069 return cs;
2072 /* Analyze the CALL and examine uses of formal parameters of the caller
2073 FBI->node (described by FBI->info), which also carries intermediate
2074 information about each formal parameter. Currently it checks
2075 whether the call calls a pointer that is a formal parameter and if so, the
2076 parameter is marked with the called flag and an indirect call graph edge
2077 describing the call is created. This is very simple for ordinary pointers
2078 represented in SSA but not-so-nice when it comes to member pointers. The
2079 ugly part of this function does nothing more than trying to match the
2080 pattern of such a call. An example of such a pattern is the gimple dump
2081 below, the call is on the last line:
2083 <bb 2>:
2084 f$__delta_5 = f.__delta;
2085 f$__pfn_24 = f.__pfn;
or
2088 <bb 2>:
2089 f$__delta_5 = MEM[(struct *)&f];
2090 f$__pfn_24 = MEM[(struct *)&f + 4B];
2092 and a few lines below:
2094 <bb 5>
2095 D.2496_3 = (int) f$__pfn_24;
2096 D.2497_4 = D.2496_3 & 1;
2097 if (D.2497_4 != 0)
2098 goto <bb 3>;
2099 else
2100 goto <bb 4>;
2102 <bb 6>:
2103 D.2500_7 = (unsigned int) f$__delta_5;
2104 D.2501_8 = &S + D.2500_7;
2105 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2106 D.2503_10 = *D.2502_9;
2107 D.2504_12 = f$__pfn_24 + -1;
2108 D.2505_13 = (unsigned int) D.2504_12;
2109 D.2506_14 = D.2503_10 + D.2505_13;
2110 D.2507_15 = *D.2506_14;
2111 iftmp.11_16 = (String:: *) D.2507_15;
2113 <bb 7>:
2114 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2115 D.2500_19 = (unsigned int) f$__delta_5;
2116 D.2508_20 = &S + D.2500_19;
2117 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2119 Such patterns are results of simple calls to a member pointer:
2121 int doprinting (int (MyString::* f)(int) const)
2123 MyString S ("somestring");
2125 return (S.*f)(4);
2128 Moreover, the function also looks for called pointers loaded from aggregates
2129 passed by value or reference. */
2131 static void
2132 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2133 tree target)
2135 struct ipa_node_params *info = fbi->info;
2136 HOST_WIDE_INT offset;
2137 bool by_ref;
2139 if (SSA_NAME_IS_DEFAULT_DEF (target))
2141 tree var = SSA_NAME_VAR (target);
2142 int index = ipa_get_param_decl_index (info, var);
2143 if (index >= 0)
2144 ipa_note_param_call (fbi->node, index, call);
2145 return;
2148 int index;
2149 gimple def = SSA_NAME_DEF_STMT (target);
2150 if (gimple_assign_single_p (def)
2151 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2152 gimple_assign_rhs1 (def), &index, &offset,
2153 NULL, &by_ref))
2155 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2156 if (cs->indirect_info->offset != offset)
2157 cs->indirect_info->outer_type = NULL;
2158 cs->indirect_info->offset = offset;
2159 cs->indirect_info->agg_contents = 1;
2160 cs->indirect_info->by_ref = by_ref;
2161 return;
2164 /* Now we need to try to match the complex pattern of calling a member
2165 pointer. */
2166 if (gimple_code (def) != GIMPLE_PHI
2167 || gimple_phi_num_args (def) != 2
2168 || !POINTER_TYPE_P (TREE_TYPE (target))
2169 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2170 return;
2172 /* First, we need to check whether one of these is a load from a member
2173 pointer that is a parameter to this function. */
2174 tree n1 = PHI_ARG_DEF (def, 0);
2175 tree n2 = PHI_ARG_DEF (def, 1);
2176 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2177 return;
2178 gimple d1 = SSA_NAME_DEF_STMT (n1);
2179 gimple d2 = SSA_NAME_DEF_STMT (n2);
2181 tree rec;
2182 basic_block bb, virt_bb;
2183 basic_block join = gimple_bb (def);
2184 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2186 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2187 return;
2189 bb = EDGE_PRED (join, 0)->src;
2190 virt_bb = gimple_bb (d2);
2192 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2194 bb = EDGE_PRED (join, 1)->src;
2195 virt_bb = gimple_bb (d1);
2197 else
2198 return;
2200 /* Second, we need to check that the basic blocks are laid out in the way
2201 corresponding to the pattern. */
2203 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2204 || single_pred (virt_bb) != bb
2205 || single_succ (virt_bb) != join)
2206 return;
2208 /* Third, let's see that the branching is done depending on the least
2209 significant bit of the pfn. */
2211 gimple branch = last_stmt (bb);
2212 if (!branch || gimple_code (branch) != GIMPLE_COND)
2213 return;
2215 if ((gimple_cond_code (branch) != NE_EXPR
2216 && gimple_cond_code (branch) != EQ_EXPR)
2217 || !integer_zerop (gimple_cond_rhs (branch)))
2218 return;
2220 tree cond = gimple_cond_lhs (branch);
2221 if (!ipa_is_ssa_with_stmt_def (cond))
2222 return;
2224 def = SSA_NAME_DEF_STMT (cond);
2225 if (!is_gimple_assign (def)
2226 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2227 || !integer_onep (gimple_assign_rhs2 (def)))
2228 return;
2230 cond = gimple_assign_rhs1 (def);
2231 if (!ipa_is_ssa_with_stmt_def (cond))
2232 return;
2234 def = SSA_NAME_DEF_STMT (cond);
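/* Look through a single conversion of the pfn value, if present.  */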
2236 if (is_gimple_assign (def)
2237 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2239 cond = gimple_assign_rhs1 (def);
2240 if (!ipa_is_ssa_with_stmt_def (cond))
2241 return;
2242 def = SSA_NAME_DEF_STMT (cond);
2245 tree rec2;
2246 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2247 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2248 == ptrmemfunc_vbit_in_delta),
2249 NULL);
2250 if (rec != rec2)
2251 return;
2253 index = ipa_get_param_decl_index (info, rec);
2254 if (index >= 0
2255 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2257 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2258 if (cs->indirect_info->offset != offset)
2259 cs->indirect_info->outer_type = NULL;
2260 cs->indirect_info->offset = offset;
2261 cs->indirect_info->agg_contents = 1;
2262 cs->indirect_info->member_ptr = 1;
2265 return;
2268 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2269 object referenced in the expression is a formal parameter of the caller
2270 FBI->node (described by FBI->info), create a call note for the
2271 statement. */
2273 static void
2274 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2275 gimple call, tree target)
2277 tree obj = OBJ_TYPE_REF_OBJECT (target);
2278 int index;
2279 HOST_WIDE_INT anc_offset;
2281 if (!flag_devirtualize)
2282 return;
2284 if (TREE_CODE (obj) != SSA_NAME)
2285 return;
2287 struct ipa_node_params *info = fbi->info;
2288 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2290 struct ipa_jump_func jfunc;
2291 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2292 return;
2294 anc_offset = 0;
2295 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2296 gcc_assert (index >= 0);
2297 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2298 call, &jfunc))
2299 return;
2301 else
2303 struct ipa_jump_func jfunc;
2304 gimple stmt = SSA_NAME_DEF_STMT (obj);
2305 tree expr;
2307 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2308 if (!expr)
2309 return;
2310 index = ipa_get_param_decl_index (info,
2311 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2312 gcc_assert (index >= 0);
2313 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2314 call, &jfunc, anc_offset))
2315 return;
2318 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2319 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2320 ii->offset = anc_offset;
2321 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2322 ii->otr_type = obj_type_ref_class (target);
2323 ii->polymorphic = 1;
2326 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2327 of the caller FBI->node (described by FBI->info). FBI also carries
2328 intermediate information about each formal parameter. */
2330 static void
2331 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2333 tree target = gimple_call_fn (call);
2335 if (!target
2336 || (TREE_CODE (target) != SSA_NAME
2337 && !virtual_method_call_p (target)))
2338 return;
2340 struct cgraph_edge *cs = fbi->node->get_edge (call);
2341 /* If we previously turned the call into a direct call, there is
2342 no need to analyze. */
2343 if (cs && !cs->indirect_unknown_callee)
2344 return;
2346 if (cs->indirect_info->polymorphic)
2348 tree otr_type;
2349 HOST_WIDE_INT otr_token;
2350 ipa_polymorphic_call_context context;
2351 tree instance;
2352 tree target = gimple_call_fn (call);
2354 instance = get_polymorphic_call_info (current_function_decl,
2355 target,
2356 &otr_type, &otr_token,
2357 &context, call);
2359 if (context.get_dynamic_type (instance,
2360 OBJ_TYPE_REF_OBJECT (target),
2361 otr_type, call))
2363 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
2364 cs->indirect_info->polymorphic = true;
2365 cs->indirect_info->param_index = -1;
2366 cs->indirect_info->otr_token = otr_token;
2367 cs->indirect_info->otr_type = otr_type;
2368 cs->indirect_info->outer_type = context.outer_type;
2369 cs->indirect_info->speculative_outer_type = context.speculative_outer_type;
2370 cs->indirect_info->offset = context.offset;
2371 cs->indirect_info->speculative_offset = context.speculative_offset;
2372 cs->indirect_info->maybe_in_construction
2373 = context.maybe_in_construction;
2374 cs->indirect_info->maybe_derived_type = context.maybe_derived_type;
2375 cs->indirect_info->speculative_maybe_derived_type
2376 = context.speculative_maybe_derived_type;
2380 if (TREE_CODE (target) == SSA_NAME)
2381 ipa_analyze_indirect_call_uses (fbi, call, target);
2382 else if (virtual_method_call_p (target))
2383 ipa_analyze_virtual_call_uses (fbi, call, target);
2387 /* Analyze the call statement STMT with respect to formal parameters
2388 (described in FBI->info) of the caller given by FBI->node. Currently it
2389 only checks whether formal parameters are called. */
2391 static void
2392 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2394 if (is_gimple_call (stmt))
2395 ipa_analyze_call_uses (fbi, stmt);
2398 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2399 If OP is a parameter declaration, mark it as used in the info structure
2400 passed in DATA. */
2402 static bool
2403 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2405 struct ipa_node_params *info = (struct ipa_node_params *) data;
2407 op = get_base_address (op);
2408 if (op
2409 && TREE_CODE (op) == PARM_DECL)
2411 int index = ipa_get_param_decl_index (info, op);
2412 gcc_assert (index >= 0);
2413 ipa_set_param_used (info, index, true);
2416 return false;
2419 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2420 the findings in various structures of the associated ipa_node_params
2421 structure, such as parameter flags, notes etc. FBI holds various data about
2422 the function being analyzed. */
2424 static void
2425 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2427 gimple_stmt_iterator gsi;
2428 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2430 gimple stmt = gsi_stmt (gsi);
2432 if (is_gimple_debug (stmt))
2433 continue;
2435 ipa_analyze_stmt_uses (fbi, stmt);
2436 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2437 visit_ref_for_mod_analysis,
2438 visit_ref_for_mod_analysis,
2439 visit_ref_for_mod_analysis);
2441 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2442 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2443 visit_ref_for_mod_analysis,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis);
2448 /* Calculate controlled uses of parameters of NODE. */
2450 static void
2451 ipa_analyze_controlled_uses (struct cgraph_node *node)
2453 struct ipa_node_params *info = IPA_NODE_REF (node);
2455 for (int i = 0; i < ipa_get_param_count (info); i++)
2457 tree parm = ipa_get_param (info, i);
2458 int controlled_uses = 0;
2460 /* For SSA regs see if parameter is used. For non-SSA we compute
2461 the flag during modification analysis. */
2462 if (is_gimple_reg (parm))
2464 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2465 parm);
2466 if (ddef && !has_zero_uses (ddef))
2468 imm_use_iterator imm_iter;
2469 use_operand_p use_p;
2471 ipa_set_param_used (info, i, true);
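/* Uses in call statements are counted as controlled; any other non-debug use
   makes the count undescribed.  */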
2472 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2473 if (!is_gimple_call (USE_STMT (use_p)))
2475 if (!is_gimple_debug (USE_STMT (use_p)))
2477 controlled_uses = IPA_UNDESCRIBED_USE;
2478 break;
2481 else
2482 controlled_uses++;
2484 else
2485 controlled_uses = 0;
2487 else
2488 controlled_uses = IPA_UNDESCRIBED_USE;
2489 ipa_set_controlled_uses (info, i, controlled_uses);
2493 /* Release the vectors held in BI. */
2495 static void
2496 free_ipa_bb_info (struct ipa_bb_info *bi)
2498 bi->cg_edges.release ();
2499 bi->param_aa_statuses.release ();
2502 /* Dominator walker driving the analysis. */
2504 class analysis_dom_walker : public dom_walker
2506 public:
2507 analysis_dom_walker (struct func_body_info *fbi)
2508 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2510 virtual void before_dom_children (basic_block);
2512 private:
2513 struct func_body_info *m_fbi;
2516 void
2517 analysis_dom_walker::before_dom_children (basic_block bb)
2519 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2520 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2523 /* Initialize the array describing properties of formal parameters
2524 of NODE, analyze their uses and compute jump functions associated
2525 with actual arguments of calls from within NODE. */
2527 void
2528 ipa_analyze_node (struct cgraph_node *node)
2530 struct func_body_info fbi;
2531 struct ipa_node_params *info;
2533 ipa_check_create_node_params ();
2534 ipa_check_create_edge_args ();
2535 info = IPA_NODE_REF (node);
2537 if (info->analysis_done)
2538 return;
2539 info->analysis_done = 1;
2541 if (ipa_func_spec_opts_forbid_analysis_p (node))
2543 for (int i = 0; i < ipa_get_param_count (info); i++)
2545 ipa_set_param_used (info, i, true);
2546 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2548 return;
2551 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2552 push_cfun (func);
2553 calculate_dominance_info (CDI_DOMINATORS);
2554 ipa_initialize_node_params (node);
2555 ipa_analyze_controlled_uses (node);
2557 fbi.node = node;
2558 fbi.info = IPA_NODE_REF (node);
2559 fbi.bb_infos = vNULL;
2560 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2561 fbi.param_count = ipa_get_param_count (info);
2562 fbi.aa_walked = 0;
2564 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2566 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2567 bi->cg_edges.safe_push (cs);
2570 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2572 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2573 bi->cg_edges.safe_push (cs);
2576 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2578 int i;
2579 struct ipa_bb_info *bi;
2580 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2581 free_ipa_bb_info (bi);
2582 fbi.bb_infos.release ();
2583 free_dominance_info (CDI_DOMINATORS);
2584 pop_cfun ();
2587 /* Given a statement CALL, which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2588 attempt a type-based devirtualization. If successful, return the
2589 target function declaration, otherwise return NULL. */
2591 tree
2592 ipa_intraprocedural_devirtualization (gimple call)
2594 tree binfo, token, fndecl;
2595 struct ipa_jump_func jfunc;
2596 tree otr = gimple_call_fn (call);
2598 jfunc.type = IPA_JF_UNKNOWN;
2599 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2600 call, obj_type_ref_class (otr));
2601 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2602 return NULL_TREE;
2603 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2604 if (!binfo)
2605 return NULL_TREE;
2606 token = OBJ_TYPE_REF_TOKEN (otr);
2607 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2608 binfo);
2609 #ifdef ENABLE_CHECKING
2610 if (fndecl)
2611 gcc_assert (possible_polymorphic_call_target_p
2612 (otr, cgraph_node::get (fndecl)));
2613 #endif
2614 return fndecl;
2617 /* Update the jump function DST when the call graph edge corresponding to SRC
2618 is being inlined, knowing that DST is of type ancestor and SRC of known
2619 type. */
2621 static void
2622 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2623 struct ipa_jump_func *dst)
2625 HOST_WIDE_INT combined_offset;
2626 tree combined_type;
2628 if (!ipa_get_jf_ancestor_type_preserved (dst))
2630 dst->type = IPA_JF_UNKNOWN;
2631 return;
2634 combined_offset = ipa_get_jf_known_type_offset (src)
2635 + ipa_get_jf_ancestor_offset (dst);
2636 combined_type = ipa_get_jf_ancestor_type (dst);
2638 ipa_set_jf_known_type (dst, combined_offset,
2639 ipa_get_jf_known_type_base_type (src),
2640 combined_type);
2643 /* Update the jump functions associated with call graph edge E when the call
2644 graph edge CS is being inlined, assuming that E->caller is already (possibly
2645 indirectly) inlined into CS->callee and that E has not been inlined. */
2647 static void
2648 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2649 struct cgraph_edge *e)
2651 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2652 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2653 int count = ipa_get_cs_argument_count (args);
2654 int i;
2656 for (i = 0; i < count; i++)
2658 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2660 if (dst->type == IPA_JF_ANCESTOR)
2662 struct ipa_jump_func *src;
2663 int dst_fid = dst->value.ancestor.formal_id;
2665 /* Variable number of arguments can cause havoc if we try to access
2666 one that does not exist in the inlined edge. So make sure we
2667 don't. */
2668 if (dst_fid >= ipa_get_cs_argument_count (top))
2670 dst->type = IPA_JF_UNKNOWN;
2671 continue;
2674 src = ipa_get_ith_jump_func (top, dst_fid);
2676 if (src->agg.items
2677 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2679 struct ipa_agg_jf_item *item;
2680 int j;
2682 /* Currently we do not produce clobber aggregate jump functions,
2683 replace with merging when we do. */
2684 gcc_assert (!dst->agg.items);
2686 dst->agg.items = vec_safe_copy (src->agg.items);
2687 dst->agg.by_ref = src->agg.by_ref;
2688 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2689 item->offset -= dst->value.ancestor.offset;
2692 if (src->type == IPA_JF_KNOWN_TYPE)
2693 combine_known_type_and_ancestor_jfs (src, dst);
2694 else if (src->type == IPA_JF_PASS_THROUGH
2695 && src->value.pass_through.operation == NOP_EXPR)
2697 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2698 dst->value.ancestor.agg_preserved &=
2699 src->value.pass_through.agg_preserved;
2700 dst->value.ancestor.type_preserved &=
2701 src->value.pass_through.type_preserved;
2703 else if (src->type == IPA_JF_ANCESTOR)
2705 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2706 dst->value.ancestor.offset += src->value.ancestor.offset;
2707 dst->value.ancestor.agg_preserved &=
2708 src->value.ancestor.agg_preserved;
2709 dst->value.ancestor.type_preserved &=
2710 src->value.ancestor.type_preserved;
2712 else
2713 dst->type = IPA_JF_UNKNOWN;
2715 else if (dst->type == IPA_JF_PASS_THROUGH)
2717 struct ipa_jump_func *src;
2718 /* We must check range due to calls with variable number of arguments
2719 and we cannot combine jump functions with operations. */
2720 if (dst->value.pass_through.operation == NOP_EXPR
2721 && (dst->value.pass_through.formal_id
2722 < ipa_get_cs_argument_count (top)))
2724 int dst_fid = dst->value.pass_through.formal_id;
2725 src = ipa_get_ith_jump_func (top, dst_fid);
2726 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2728 switch (src->type)
2730 case IPA_JF_UNKNOWN:
2731 dst->type = IPA_JF_UNKNOWN;
2732 break;
2733 case IPA_JF_KNOWN_TYPE:
2734 if (ipa_get_jf_pass_through_type_preserved (dst))
2735 ipa_set_jf_known_type (dst,
2736 ipa_get_jf_known_type_offset (src),
2737 ipa_get_jf_known_type_base_type (src),
2738 ipa_get_jf_known_type_component_type (src));
2739 else
2740 dst->type = IPA_JF_UNKNOWN;
2741 break;
2742 case IPA_JF_CONST:
2743 ipa_set_jf_cst_copy (dst, src);
2744 break;
2746 case IPA_JF_PASS_THROUGH:
2748 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2749 enum tree_code operation;
2750 operation = ipa_get_jf_pass_through_operation (src);
2752 if (operation == NOP_EXPR)
2754 bool agg_p, type_p;
2755 agg_p = dst_agg_p
2756 && ipa_get_jf_pass_through_agg_preserved (src);
2757 type_p = ipa_get_jf_pass_through_type_preserved (src)
2758 && ipa_get_jf_pass_through_type_preserved (dst);
2759 ipa_set_jf_simple_pass_through (dst, formal_id,
2760 agg_p, type_p);
2762 else
2764 tree operand = ipa_get_jf_pass_through_operand (src);
2765 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2766 operation);
2768 break;
2770 case IPA_JF_ANCESTOR:
2772 bool agg_p, type_p;
2773 agg_p = dst_agg_p
2774 && ipa_get_jf_ancestor_agg_preserved (src);
2775 type_p = ipa_get_jf_ancestor_type_preserved (src)
2776 && ipa_get_jf_pass_through_type_preserved (dst);
2777 ipa_set_ancestor_jf (dst,
2778 ipa_get_jf_ancestor_offset (src),
2779 ipa_get_jf_ancestor_type (src),
2780 ipa_get_jf_ancestor_formal_id (src),
2781 agg_p, type_p);
2782 break;
2784 default:
2785 gcc_unreachable ();
2788 if (src->agg.items
2789 && (dst_agg_p || !src->agg.by_ref))
2791 /* Currently we do not produce clobber aggregate jump
2792 functions, replace with merging when we do. */
2793 gcc_assert (!dst->agg.items);
2795 dst->agg.by_ref = src->agg.by_ref;
2796 dst->agg.items = vec_safe_copy (src->agg.items);
2799 else
2800 dst->type = IPA_JF_UNKNOWN;
2805 /* If TARGET is an ADDR_EXPR of a function declaration, make it the destination
2806 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2808 struct cgraph_edge *
2809 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2811 struct cgraph_node *callee;
2812 struct inline_edge_summary *es = inline_edge_summary (ie);
2813 bool unreachable = false;
2815 if (TREE_CODE (target) == ADDR_EXPR)
2816 target = TREE_OPERAND (target, 0);
2817 if (TREE_CODE (target) != FUNCTION_DECL)
2819 target = canonicalize_constructor_val (target, NULL);
2820 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2822 if (ie->indirect_info->member_ptr)
2823 /* Member pointer call that goes through a VMT lookup. */
2824 return NULL;
2826 if (dump_enabled_p ())
2828 location_t loc = gimple_location_safe (ie->call_stmt);
2829 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2830 "discovered direct call to non-function in %s/%i, "
2831 "making it __builtin_unreachable\n",
2832 ie->caller->name (), ie->caller->order);
2835 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2836 callee = cgraph_node::get_create (target);
2837 unreachable = true;
2839 else
2840 callee = cgraph_node::get (target);
2842 else
2843 callee = cgraph_node::get (target);
2845 /* Because may-edges are not explicitly represented and the vtable may be
2846 external, we may create the first reference to the object in the unit. */
2847 if (!callee || callee->global.inlined_to)
2850 /* We had better ensure we can refer to it.
2851 In the case of static functions we are out of luck, since we already
2852 removed its body. In the case of public functions we may or may
2853 not introduce the reference. */
2854 if (!canonicalize_constructor_val (target, NULL)
2855 || !TREE_PUBLIC (target))
2857 if (dump_file)
2858 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2859 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2860 xstrdup (ie->caller->name ()),
2861 ie->caller->order,
2862 xstrdup (ie->callee->name ()),
2863 ie->callee->order);
2864 return NULL;
2866 callee = cgraph_node::get_create (target);
2869 if (!dbg_cnt (devirt))
2870 return NULL;
2872 ipa_check_create_node_params ();
2874 /* We cannot make edges to inline clones. It is a bug that someone removed
2875 the cgraph node too early. */
2876 gcc_assert (!callee->global.inlined_to);
2878 if (dump_file && !unreachable)
2880 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2881 "(%s/%i -> %s/%i), for stmt ",
2882 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2883 xstrdup (ie->caller->name ()),
2884 ie->caller->order,
2885 xstrdup (callee->name ()),
2886 callee->order);
2887 if (ie->call_stmt)
2888 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2889 else
2890 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2892 if (dump_enabled_p ())
2894 location_t loc = gimple_location_safe (ie->call_stmt);
2896 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2897 "converting indirect call in %s to direct call to %s\n",
2898 ie->caller->name (), callee->name ());
2900 ie = ie->make_direct (callee);
2901 es = inline_edge_summary (ie);
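/* A direct call is cheaper than an indirect one; lower the size and time
   estimates of the call statement accordingly.  */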
2902 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2903 - eni_size_weights.call_cost);
2904 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2905 - eni_time_weights.call_cost);
2907 return ie;
2910 /* Retrieve a value from aggregate jump function AGG for the given OFFSET or
2911 return NULL if there is none. BY_REF specifies whether the value has to
2912 be passed by reference or by value. */
2914 tree
2915 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2916 HOST_WIDE_INT offset, bool by_ref)
2918 struct ipa_agg_jf_item *item;
2919 int i;
2921 if (by_ref != agg->by_ref)
2922 return NULL;
2924 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2925 if (item->offset == offset)
2927 /* Currently we do not have clobber values, return NULL for them once
2928 we do. */
2929 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2930 return item->value;
2932 return NULL;
2935 /* Remove a reference to SYMBOL from the list of references of a node given by
2936 reference description RDESC. Return true if the reference has been
2937 successfully found and removed. */
2939 static bool
2940 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2942 struct ipa_ref *to_del;
2943 struct cgraph_edge *origin;
2945 origin = rdesc->cs;
2946 if (!origin)
2947 return false;
2948 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2949 origin->lto_stmt_uid);
2950 if (!to_del)
2951 return false;
2953 to_del->remove_reference ();
2954 if (dump_file)
2955 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2956 xstrdup (origin->caller->name ()),
2957 origin->caller->order, xstrdup (symbol->name ()));
2958 return true;
2961 /* If JFUNC has a reference description with refcount different from
2962 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2963 NULL. JFUNC must be a constant jump function. */
2965 static struct ipa_cst_ref_desc *
2966 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2968 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2969 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2970 return rdesc;
2971 else
2972 return NULL;
2975 /* If the value of constant jump function JFUNC is an address of a function
2976 declaration, return the associated call graph node. Otherwise return
2977 NULL. */
2979 static cgraph_node *
2980 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2982 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2983 tree cst = ipa_get_jf_constant (jfunc);
2984 if (TREE_CODE (cst) != ADDR_EXPR
2985 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2986 return NULL;
2988 return cgraph_node::get (TREE_OPERAND (cst, 0));
2992 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2993 refcount and, if it hits zero, remove the reference to the referenced symbol
2994 from the caller of the edge specified in the rdesc. Return false if either
2995 the symbol or the reference could not be found, otherwise return true. */
2997 static bool
2998 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3000 struct ipa_cst_ref_desc *rdesc;
3001 if (jfunc->type == IPA_JF_CONST
3002 && (rdesc = jfunc_rdesc_usable (jfunc))
3003 && --rdesc->refcount == 0)
3005 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3006 if (!symbol)
3007 return false;
3009 return remove_described_reference (symbol, rdesc);
3011 return true;
3014 /* Try to find a destination for indirect edge IE that corresponds to a simple
3015 call or a call of a member function pointer and where the destination is a
3016 pointer formal parameter described by jump function JFUNC. If it can be
3017 determined, return the newly direct edge, otherwise return NULL.
3018 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
3020 static struct cgraph_edge *
3021 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3022 struct ipa_jump_func *jfunc,
3023 struct ipa_node_params *new_root_info)
3025 struct cgraph_edge *cs;
3026 tree target;
3027 bool agg_contents = ie->indirect_info->agg_contents;
3029 if (ie->indirect_info->agg_contents)
3030 target = ipa_find_agg_cst_for_param (&jfunc->agg,
3031 ie->indirect_info->offset,
3032 ie->indirect_info->by_ref);
3033 else
3034 target = ipa_value_from_jfunc (new_root_info, jfunc);
3035 if (!target)
3036 return NULL;
3037 cs = ipa_make_edge_direct_to_target (ie, target);
3039 if (cs && !agg_contents)
3041 bool ok;
3042 gcc_checking_assert (cs->callee
3043 && (cs != ie
3044 || jfunc->type != IPA_JF_CONST
3045 || !cgraph_node_for_jfunc (jfunc)
3046 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3047 ok = try_decrement_rdesc_refcount (jfunc);
3048 gcc_checking_assert (ok);
3051 return cs;
3054 /* Return the target to be used in cases of impossible devirtualization. IE
3055 and target (the latter can be NULL) are dumped when dumping is enabled. */
3057 tree
3058 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3060 if (dump_file)
3062 if (target)
3063 fprintf (dump_file,
3064 "Type inconsistent devirtualization: %s/%i->%s\n",
3065 ie->caller->name (), ie->caller->order,
3066 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3067 else
3068 fprintf (dump_file,
3069 "No devirtualization target in %s/%i\n",
3070 ie->caller->name (), ie->caller->order);
3072 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3073 cgraph_node::get_create (new_target);
3074 return new_target;
3077 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3078 call based on a formal parameter which is described by jump function JFUNC
3079 and if it can be determined, make it direct and return the direct edge.
3080 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
3081 are relative to. */
3083 static struct cgraph_edge *
3084 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3085 struct ipa_jump_func *jfunc,
3086 struct ipa_node_params *new_root_info)
3088 tree binfo, target;
3090 if (!flag_devirtualize)
3091 return NULL;
3093 /* First try to do lookup via known virtual table pointer value. */
3094 if (!ie->indirect_info->by_ref)
3096 tree vtable;
3097 unsigned HOST_WIDE_INT offset;
3098 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
3099 ie->indirect_info->offset,
3100 true);
3101 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3103 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3104 vtable, offset);
3105 if (target)
3107 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
3108 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
3109 || !possible_polymorphic_call_target_p
3110 (ie, cgraph_node::get (target)))
3111 target = ipa_impossible_devirt_target (ie, target);
3112 return ipa_make_edge_direct_to_target (ie, target);
3117 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
3119 if (!binfo)
3120 return NULL;
3122 if (TREE_CODE (binfo) != TREE_BINFO)
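/* The invariant is not a BINFO; try to derive a polymorphic call context
   from it instead.  */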
3124 ipa_polymorphic_call_context context;
3125 vec <cgraph_node *>targets;
3126 bool final;
3128 if (!get_polymorphic_call_info_from_invariant
3129 (&context, binfo, ie->indirect_info->otr_type,
3130 ie->indirect_info->offset))
3131 return NULL;
3132 targets = possible_polymorphic_call_targets
3133 (ie->indirect_info->otr_type,
3134 ie->indirect_info->otr_token,
3135 context, &final);
3136 if (!final || targets.length () > 1)
3137 return NULL;
3138 if (targets.length () == 1)
3139 target = targets[0]->decl;
3140 else
3141 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3143 else
3145 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
3146 ie->indirect_info->otr_type);
3147 if (binfo)
3148 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
3149 binfo);
3150 else
3151 return NULL;
3154 if (target)
3156 if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
3157 target = ipa_impossible_devirt_target (ie, target);
3158 return ipa_make_edge_direct_to_target (ie, target);
3160 else
3161 return NULL;
3164 /* Update the param called notes associated with NODE when CS is being inlined,
3165 assuming NODE is (potentially indirectly) inlined into CS->callee.
3166 Moreover, if the callee is discovered to be constant, create a new cgraph
3167 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3168 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3170 static bool
3171 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3172 struct cgraph_node *node,
3173 vec<cgraph_edge *> *new_edges)
3175 struct ipa_edge_args *top;
3176 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3177 struct ipa_node_params *new_root_info;
3178 bool res = false;
3180 ipa_check_create_edge_args ();
3181 top = IPA_EDGE_REF (cs);
3182 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3183 ? cs->caller->global.inlined_to
3184 : cs->caller);
3186 for (ie = node->indirect_calls; ie; ie = next_ie)
3188 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3189 struct ipa_jump_func *jfunc;
3190 int param_index;
3192 next_ie = ie->next_callee;
3194 if (ici->param_index == -1)
3195 continue;
3197 /* We must check range due to calls with variable number of arguments: */
3198 if (ici->param_index >= ipa_get_cs_argument_count (top))
3200 ici->param_index = -1;
3201 continue;
3204 param_index = ici->param_index;
3205 jfunc = ipa_get_ith_jump_func (top, param_index);
3207 if (!flag_indirect_inlining)
3208 new_direct_edge = NULL;
3209 else if (ici->polymorphic)
3210 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3211 new_root_info);
3212 else
3213 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3214 new_root_info);
3215 /* If speculation was removed, then we need to do nothing. */
3216 if (new_direct_edge && new_direct_edge != ie)
3218 new_direct_edge->indirect_inlining_edge = 1;
3219 top = IPA_EDGE_REF (cs);
3220 res = true;
3222 else if (new_direct_edge)
3224 new_direct_edge->indirect_inlining_edge = 1;
3225 if (new_direct_edge->call_stmt)
3226 new_direct_edge->call_stmt_cannot_inline_p
3227 = !gimple_check_call_matching_types (
3228 new_direct_edge->call_stmt,
3229 new_direct_edge->callee->decl, false);
3230 if (new_edges)
3232 new_edges->safe_push (new_direct_edge);
3233 res = true;
3235 top = IPA_EDGE_REF (cs);
3237 else if (jfunc->type == IPA_JF_PASS_THROUGH
3238 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3240 if ((ici->agg_contents
3241 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3242 || (ici->polymorphic
3243 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3244 ici->param_index = -1;
3245 else
3246 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3248 else if (jfunc->type == IPA_JF_ANCESTOR)
3250 if ((ici->agg_contents
3251 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3252 || (ici->polymorphic
3253 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3254 ici->param_index = -1;
3255 else
3257 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3258 if (ipa_get_jf_ancestor_offset (jfunc))
3259 ici->outer_type = NULL;
3260 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3263 else
3264 /* Either we can find a destination for this edge now or never. */
3265 ici->param_index = -1;
3268 return res;
3271 /* Recursively traverse subtree of NODE (including node) made of inlined
3272 cgraph_edges when CS has been inlined and invoke
3273 update_indirect_edges_after_inlining on all nodes and
3274 update_jump_functions_after_inlining on all non-inlined edges that lead out
3275 of this subtree. Newly discovered indirect edges will be added to
3276 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3277 created. */
3279 static bool
3280 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3281 struct cgraph_node *node,
3282 vec<cgraph_edge *> *new_edges)
3284 struct cgraph_edge *e;
3285 bool res;
3287 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3289 for (e = node->callees; e; e = e->next_callee)
3290 if (!e->inline_failed)
3291 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3292 else
3293 update_jump_functions_after_inlining (cs, e);
3294 for (e = node->indirect_calls; e; e = e->next_callee)
3295 update_jump_functions_after_inlining (cs, e);
3297 return res;
3300 /* Combine two controlled uses counts as done during inlining. */
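/* The call site being combined over was itself one of the counted uses,
   hence the sum is decremented by one.  */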
3302 static int
3303 combine_controlled_uses_counters (int c, int d)
3305 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3306 return IPA_UNDESCRIBED_USE;
3307 else
3308 return c + d - 1;
3311 /* Propagate the number of controlled users from CS->callee to the new root of
3312 the tree of inlined nodes. */
3314 static void
3315 propagate_controlled_uses (struct cgraph_edge *cs)
3317 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3318 struct cgraph_node *new_root = cs->caller->global.inlined_to
3319 ? cs->caller->global.inlined_to : cs->caller;
3320 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3321 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3322 int count, i;
3324 count = MIN (ipa_get_cs_argument_count (args),
3325 ipa_get_param_count (old_root_info));
3326 for (i = 0; i < count; i++)
3328 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3329 struct ipa_cst_ref_desc *rdesc;
3331 if (jf->type == IPA_JF_PASS_THROUGH)
3333 int src_idx, c, d;
3334 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3335 c = ipa_get_controlled_uses (new_root_info, src_idx);
3336 d = ipa_get_controlled_uses (old_root_info, i);
3338 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3339 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3340 c = combine_controlled_uses_counters (c, d);
3341 ipa_set_controlled_uses (new_root_info, src_idx, c);
3342 if (c == 0 && new_root_info->ipcp_orig_node)
3344 struct cgraph_node *n;
3345 struct ipa_ref *ref;
3346 tree t = new_root_info->known_vals[src_idx];
3348 if (t && TREE_CODE (t) == ADDR_EXPR
3349 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3350 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3351 && (ref = new_root->find_reference (n, NULL, 0)))
3353 if (dump_file)
3354 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3355 "reference from %s/%i to %s/%i.\n",
3356 xstrdup (new_root->name ()),
3357 new_root->order,
3358 xstrdup (n->name ()), n->order);
3359 ref->remove_reference ();
3363 else if (jf->type == IPA_JF_CONST
3364 && (rdesc = jfunc_rdesc_usable (jf)))
3366 int d = ipa_get_controlled_uses (old_root_info, i);
3367 int c = rdesc->refcount;
3368 rdesc->refcount = combine_controlled_uses_counters (c, d);
3369 if (rdesc->refcount == 0)
3371 tree cst = ipa_get_jf_constant (jf);
3372 struct cgraph_node *n;
3373 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3374 && TREE_CODE (TREE_OPERAND (cst, 0))
3375 == FUNCTION_DECL);
3376 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3377 if (n)
3379 struct cgraph_node *clone;
3380 bool ok;
3381 ok = remove_described_reference (n, rdesc);
3382 gcc_checking_assert (ok);
3384 clone = cs->caller;
3385 while (clone->global.inlined_to
3386 && clone != rdesc->cs->caller
3387 && IPA_NODE_REF (clone)->ipcp_orig_node)
3389 struct ipa_ref *ref;
3390 ref = clone->find_reference (n, NULL, 0);
3391 if (ref)
3393 if (dump_file)
3394 fprintf (dump_file, "ipa-prop: Removing "
3395 "cloning-created reference "
3396 "from %s/%i to %s/%i.\n",
3397 xstrdup (clone->name ()),
3398 clone->order,
3399 xstrdup (n->name ()),
3400 n->order);
3401 ref->remove_reference ();
3403 clone = clone->callers->caller;
3410 for (i = ipa_get_param_count (old_root_info);
3411 i < ipa_get_cs_argument_count (args);
3412 i++)
3414 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3416 if (jf->type == IPA_JF_CONST)
3418 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3419 if (rdesc)
3420 rdesc->refcount = IPA_UNDESCRIBED_USE;
3422 else if (jf->type == IPA_JF_PASS_THROUGH)
3423 ipa_set_controlled_uses (new_root_info,
3424 jf->value.pass_through.formal_id,
3425 IPA_UNDESCRIBED_USE);
3429 /* Update jump functions and call note functions on inlining the call site CS.
3430 CS is expected to lead to a node already cloned by
3431 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3432 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3433 created. */
3435 bool
3436 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3437 vec<cgraph_edge *> *new_edges)
3439 bool changed;
3440 /* Do nothing if the preparation phase has not been carried out yet
3441 (i.e. during early inlining). */
3442 if (!ipa_node_params_vector.exists ())
3443 return false;
3444 gcc_assert (ipa_edge_args_vector);
3446 propagate_controlled_uses (cs);
3447 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3449 return changed;
3452 /* Frees all dynamically allocated structures that the argument info points
3453 to. */
3455 void
3456 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3458 vec_free (args->jump_functions);
3459 memset (args, 0, sizeof (*args));
3462 /* Free all ipa_edge structures. */
3464 void
3465 ipa_free_all_edge_args (void)
3467 int i;
3468 struct ipa_edge_args *args;
3470 if (!ipa_edge_args_vector)
3471 return;
3473 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3474 ipa_free_edge_args_substructures (args);
3476 vec_free (ipa_edge_args_vector);
3479 /* Frees all dynamically allocated structures that the param info points
3480 to. */
3482 void
3483 ipa_free_node_params_substructures (struct ipa_node_params *info)
3485 info->descriptors.release ();
3486 free (info->lattices);
3487 /* Lattice values and their sources are deallocated with their allocation
3488 pool. */
3489 info->known_vals.release ();
3490 memset (info, 0, sizeof (*info));
3493 /* Free all ipa_node_params structures. */
3495 void
3496 ipa_free_all_node_params (void)
3498 int i;
3499 struct ipa_node_params *info;
3501 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3502 ipa_free_node_params_substructures (info);
3504 ipa_node_params_vector.release ();
3507 /* Set the aggregate replacements of NODE to be AGGVALS. */
3509 void
3510 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3511 struct ipa_agg_replacement_value *aggvals)
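/* The replacement vector is indexed by node uid; grow it on demand.  */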
3513 if (vec_safe_length (ipa_node_agg_replacements)
3514 <= (unsigned) symtab->cgraph_max_uid)
3515 vec_safe_grow_cleared (ipa_node_agg_replacements,
3516 symtab->cgraph_max_uid + 1);
3518 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3521 /* Hook that is called by cgraph.c when an edge is removed. */
3523 static void
3524 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3526 struct ipa_edge_args *args;
3528 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3529 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3530 return;
3532 args = IPA_EDGE_REF (cs);
3533 if (args->jump_functions)
3535 struct ipa_jump_func *jf;
3536 int i;
3537 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3539 struct ipa_cst_ref_desc *rdesc;
3540 try_decrement_rdesc_refcount (jf);
3541 if (jf->type == IPA_JF_CONST
3542 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3543 && rdesc->cs == cs)
3544 rdesc->cs = NULL;
3548 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3551 /* Hook that is called by cgraph.c when a node is removed. */
3553 static void
3554 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3556 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3557 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3558 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3559 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3560 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3563 /* Hook that is called by cgraph.c when an edge is duplicated. */
3565 static void
3566 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3567 __attribute__((unused)) void *data)
3569 struct ipa_edge_args *old_args, *new_args;
3570 unsigned int i;
3572 ipa_check_create_edge_args ();
3574 old_args = IPA_EDGE_REF (src);
3575 new_args = IPA_EDGE_REF (dst);
3577 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3579 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3581 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3582 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3584 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3586 if (src_jf->type == IPA_JF_CONST)
3588 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3590 if (!src_rdesc)
3591 dst_jf->value.constant.rdesc = NULL;
3592 else if (src->caller == dst->caller)
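/* The new edge has the same caller as the old one (e.g. it is a speculative
   variant of the same call); clone the reference and give the new edge its
   own reference descriptor.  */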
3594 struct ipa_ref *ref;
3595 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3596 gcc_checking_assert (n);
3597 ref = src->caller->find_reference (n, src->call_stmt,
3598 src->lto_stmt_uid);
3599 gcc_checking_assert (ref);
3600 dst->caller->clone_reference (ref, ref->stmt);
3602 gcc_checking_assert (ipa_refdesc_pool);
3603 struct ipa_cst_ref_desc *dst_rdesc
3604 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3605 dst_rdesc->cs = dst;
3606 dst_rdesc->refcount = src_rdesc->refcount;
3607 dst_rdesc->next_duplicate = NULL;
3608 dst_jf->value.constant.rdesc = dst_rdesc;
3610 else if (src_rdesc->cs == src)
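/* SRC is the edge its rdesc describes; give the duplicate an rdesc of its
   own and chain it into the list of duplicates.  */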
3612 struct ipa_cst_ref_desc *dst_rdesc;
3613 gcc_checking_assert (ipa_refdesc_pool);
3614 dst_rdesc
3615 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3616 dst_rdesc->cs = dst;
3617 dst_rdesc->refcount = src_rdesc->refcount;
3618 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3619 src_rdesc->next_duplicate = dst_rdesc;
3620 dst_jf->value.constant.rdesc = dst_rdesc;
3622 else
3624 struct ipa_cst_ref_desc *dst_rdesc;
3625 /* This can happen during inlining, when a JFUNC can refer to a
3626 reference taken in a function up in the tree of inline clones.
3627 We need to find the duplicate that refers to our tree of
3628 inline clones. */
3630 gcc_assert (dst->caller->global.inlined_to);
3631 for (dst_rdesc = src_rdesc->next_duplicate;
3632 dst_rdesc;
3633 dst_rdesc = dst_rdesc->next_duplicate)
3635 struct cgraph_node *top;
3636 top = dst_rdesc->cs->caller->global.inlined_to
3637 ? dst_rdesc->cs->caller->global.inlined_to
3638 : dst_rdesc->cs->caller;
3639 if (dst->caller->global.inlined_to == top)
3640 break;
3642 gcc_assert (dst_rdesc);
3643 dst_jf->value.constant.rdesc = dst_rdesc;
3649 /* Hook that is called by cgraph.c when a node is duplicated. */
3651 static void
3652 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3653 ATTRIBUTE_UNUSED void *data)
3655 struct ipa_node_params *old_info, *new_info;
3656 struct ipa_agg_replacement_value *old_av, *new_av;
3658 ipa_check_create_node_params ();
3659 old_info = IPA_NODE_REF (src);
3660 new_info = IPA_NODE_REF (dst);
3662 new_info->descriptors = old_info->descriptors.copy ();
3663 new_info->lattices = NULL;
3664 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3666 new_info->analysis_done = old_info->analysis_done;
3667 new_info->node_enqueued = old_info->node_enqueued;
3669 old_av = ipa_get_agg_replacements_for_node (src);
3670 if (!old_av)
3671 return;
3673 new_av = NULL;
3674 while (old_av)
3676 struct ipa_agg_replacement_value *v;
3678 v = ggc_alloc<ipa_agg_replacement_value> ();
3679 memcpy (v, old_av, sizeof (*v));
3680 v->next = new_av;
3681 new_av = v;
3682 old_av = old_av->next;
3684 ipa_set_node_agg_value_chain (dst, new_av);
3688 /* Analyze a function newly added to the callgraph. */
3690 static void
3691 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3693 if (node->has_gimple_body_p ())
3694 ipa_analyze_node (node);
3697 /* Register our cgraph hooks if they are not already there. */
3699 void
3700 ipa_register_cgraph_hooks (void)
3702 if (!edge_removal_hook_holder)
3703 edge_removal_hook_holder =
3704 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3705 if (!node_removal_hook_holder)
3706 node_removal_hook_holder =
3707 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3708 if (!edge_duplication_hook_holder)
3709 edge_duplication_hook_holder =
3710 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3711 if (!node_duplication_hook_holder)
3712 node_duplication_hook_holder =
3713 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3714 function_insertion_hook_holder =
3715 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3718 /* Unregister our cgraph hooks. */
3720 static void
3721 ipa_unregister_cgraph_hooks (void)
3723 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3724 edge_removal_hook_holder = NULL;
3725 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3726 node_removal_hook_holder = NULL;
3727 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3728 edge_duplication_hook_holder = NULL;
3729 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3730 node_duplication_hook_holder = NULL;
3731 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3732 function_insertion_hook_holder = NULL;
3735 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3736 longer needed after ipa-cp. */
3738 void
3739 ipa_free_all_structures_after_ipa_cp (void)
3741 if (!optimize)
3743 ipa_free_all_edge_args ();
3744 ipa_free_all_node_params ();
3745 free_alloc_pool (ipcp_sources_pool);
3746 free_alloc_pool (ipcp_values_pool);
3747 free_alloc_pool (ipcp_agg_lattice_pool);
3748 ipa_unregister_cgraph_hooks ();
3749 if (ipa_refdesc_pool)
3750 free_alloc_pool (ipa_refdesc_pool);
3754 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3755 longer needed after indirect inlining. */
3757 void
3758 ipa_free_all_structures_after_iinln (void)
3760 ipa_free_all_edge_args ();
3761 ipa_free_all_node_params ();
3762 ipa_unregister_cgraph_hooks ();
3763 if (ipcp_sources_pool)
3764 free_alloc_pool (ipcp_sources_pool);
3765 if (ipcp_values_pool)
3766 free_alloc_pool (ipcp_values_pool);
3767 if (ipcp_agg_lattice_pool)
3768 free_alloc_pool (ipcp_agg_lattice_pool);
3769 if (ipa_refdesc_pool)
3770 free_alloc_pool (ipa_refdesc_pool);
3773 /* Print the parameter descriptors of NODE to F. */
3776 void
3777 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3779 int i, count;
3780 struct ipa_node_params *info;
3782 if (!node->definition)
3783 return;
3784 info = IPA_NODE_REF (node);
3785 fprintf (f, " function %s/%i parameter descriptors:\n",
3786 node->name (), node->order);
3787 count = ipa_get_param_count (info);
3788 for (i = 0; i < count; i++)
3790 int c;
3792 fprintf (f, " ");
3793 ipa_dump_param (f, info, i);
3794 if (ipa_is_param_used (info, i))
3795 fprintf (f, " used");
3796 c = ipa_get_controlled_uses (info, i);
3797 if (c == IPA_UNDESCRIBED_USE)
3798 fprintf (f, " undescribed_use");
3799 else
3800 fprintf (f, " controlled_uses=%i", c);
3801 fprintf (f, "\n");
3805 /* Print the parameter descriptors of all functions in the
3806 callgraph to F. */
3808 void
3809 ipa_print_all_params (FILE * f)
3811 struct cgraph_node *node;
3813 fprintf (f, "\nFunction parameters:\n");
3814 FOR_EACH_FUNCTION (node)
3815 ipa_print_node_params (f, node);
3818 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3820 vec<tree>
3821 ipa_get_vector_of_formal_parms (tree fndecl)
3823 vec<tree> args;
3824 int count;
3825 tree parm;
3827 gcc_assert (!flag_wpa);
3828 count = count_formal_params (fndecl);
3829 args.create (count);
3830 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3831 args.quick_push (parm);
3833 return args;
3836 /* Return a heap-allocated vector containing the types of formal parameters of
3837 function type FNTYPE. */
3839 vec<tree>
3840 ipa_get_vector_of_formal_parm_types (tree fntype)
3842 vec<tree> types;
3843 int count = 0;
3844 tree t;
3846 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3847 count++;
3849 types.create (count);
3850 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3851 types.quick_push (TREE_VALUE (t));
3853 return types;
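/* A minimal usage sketch (a hypothetical caller, not part of this file):

vec<tree> types = ipa_get_vector_of_formal_parm_types (TREE_TYPE (fndecl));
for (unsigned i = 0; i < types.length (); i++)
print_generic_expr (stderr, types[i], 0);
types.release ();

The caller owns the returned heap-allocated vector and must release it. */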
3856 /* Modify the function declaration FNDECL and its type according to the plan in
3857 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3858 to reflect the actual parameters being modified which are determined by the
3859 base_index field. */
3861 void
3862 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3864 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3865 tree orig_type = TREE_TYPE (fndecl);
3866 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3868 /* The following test is an ugly hack: some functions simply don't have any
3869 arguments in their type. This is probably a bug but well... */
3870 bool care_for_types = (old_arg_types != NULL_TREE);
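/* E.g. an unprototyped declaration such as "void foo ();" has
TYPE_ARG_TYPES equal to NULL_TREE even when its definition takes
arguments. */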
3871 bool last_parm_void;
3872 vec<tree> otypes;
3873 if (care_for_types)
3875 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3876 == void_type_node);
3877 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3878 if (last_parm_void)
3879 gcc_assert (oparms.length () + 1 == otypes.length ());
3880 else
3881 gcc_assert (oparms.length () == otypes.length ());
3883 else
3885 last_parm_void = false;
3886 otypes.create (0);
3889 int len = adjustments.length ();
3890 tree *link = &DECL_ARGUMENTS (fndecl);
3891 tree new_arg_types = NULL;
3892 for (int i = 0; i < len; i++)
3894 struct ipa_parm_adjustment *adj;
3895 gcc_assert (link);
3897 adj = &adjustments[i];
3898 tree parm;
3899 if (adj->op == IPA_PARM_OP_NEW)
3900 parm = NULL;
3901 else
3902 parm = oparms[adj->base_index];
3903 adj->base = parm;
3905 if (adj->op == IPA_PARM_OP_COPY)
3907 if (care_for_types)
3908 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3909 new_arg_types);
3910 *link = parm;
3911 link = &DECL_CHAIN (parm);
3913 else if (adj->op != IPA_PARM_OP_REMOVE)
3915 tree new_parm;
3916 tree ptype;
3918 if (adj->by_ref)
3919 ptype = build_pointer_type (adj->type);
3920 else
3922 ptype = adj->type;
3923 if (is_gimple_reg_type (ptype))
3925 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3926 if (TYPE_ALIGN (ptype) < malign)
3927 ptype = build_aligned_type (ptype, malign);
3931 if (care_for_types)
3932 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3934 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3935 ptype);
3936 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3937 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3938 DECL_ARTIFICIAL (new_parm) = 1;
3939 DECL_ARG_TYPE (new_parm) = ptype;
3940 DECL_CONTEXT (new_parm) = fndecl;
3941 TREE_USED (new_parm) = 1;
3942 DECL_IGNORED_P (new_parm) = 1;
3943 layout_decl (new_parm, 0);
3945 if (adj->op == IPA_PARM_OP_NEW)
3946 adj->base = NULL;
3947 else
3948 adj->base = parm;
3949 adj->new_decl = new_parm;
3951 *link = new_parm;
3952 link = &DECL_CHAIN (new_parm);
3956 *link = NULL_TREE;
3958 tree new_reversed = NULL;
3959 if (care_for_types)
3961 new_reversed = nreverse (new_arg_types);
3962 if (last_parm_void)
3964 if (new_reversed)
3965 TREE_CHAIN (new_arg_types) = void_list_node;
3966 else
3967 new_reversed = void_list_node;
3971 /* Use build_distinct_type_copy to preserve as much as possible from the
3972 original type (debug info, attribute lists etc.).
3973 The exception is METHOD_TYPEs, which must have a THIS argument; when we
3974 are asked to remove it, we need to build a new FUNCTION_TYPE instead. */
3976 tree new_type = NULL;
3977 if (TREE_CODE (orig_type) != METHOD_TYPE
3978 || (adjustments[0].op == IPA_PARM_OP_COPY
3979 && adjustments[0].base_index == 0))
3981 new_type = build_distinct_type_copy (orig_type);
3982 TYPE_ARG_TYPES (new_type) = new_reversed;
3984 else
3986 new_type
3987 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3988 new_reversed));
3989 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3990 DECL_VINDEX (fndecl) = NULL_TREE;
3993 /* When signature changes, we need to clear builtin info. */
3994 if (DECL_BUILT_IN (fndecl))
3996 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3997 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
4000 /* This is a new type, not a copy of an old type. Need to reassociate
4001 variants. We can handle everything except the main variant lazily. */
4002 tree t = TYPE_MAIN_VARIANT (orig_type);
4003 if (orig_type != t)
4005 TYPE_MAIN_VARIANT (new_type) = t;
4006 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
4007 TYPE_NEXT_VARIANT (t) = new_type;
4009 else
4011 TYPE_MAIN_VARIANT (new_type) = new_type;
4012 TYPE_NEXT_VARIANT (new_type) = NULL;
4015 TREE_TYPE (fndecl) = new_type;
4016 DECL_VIRTUAL_P (fndecl) = 0;
4017 DECL_LANG_SPECIFIC (fndecl) = NULL;
4018 otypes.release ();
4019 oparms.release ();
4022 /* Modify actual arguments of the call statement STMT as indicated in
4023 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
4024 otherwise it must be the corresponding call graph edge. */
4026 void
4027 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
4028 ipa_parm_adjustment_vec adjustments)
4030 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
4031 vec<tree> vargs;
4032 vec<tree, va_gc> **debug_args = NULL;
4033 gimple new_stmt;
4034 gimple_stmt_iterator gsi, prev_gsi;
4035 tree callee_decl;
4036 int i, len;
4038 len = adjustments.length ();
4039 vargs.create (len);
4040 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
4041 current_node->remove_stmt_references (stmt);
4043 gsi = gsi_for_stmt (stmt);
4044 prev_gsi = gsi;
4045 gsi_prev (&prev_gsi);
4046 for (i = 0; i < len; i++)
4048 struct ipa_parm_adjustment *adj;
4050 adj = &adjustments[i];
4052 if (adj->op == IPA_PARM_OP_COPY)
4054 tree arg = gimple_call_arg (stmt, adj->base_index);
4056 vargs.quick_push (arg);
4058 else if (adj->op != IPA_PARM_OP_REMOVE)
4060 tree expr, base, off;
4061 location_t loc;
4062 unsigned int deref_align = 0;
4063 bool deref_base = false;
4065 /* When we create a new parameter out of the value of the old one, we can
4066 do the following kinds of transformations:
4068 - A scalar passed by reference is converted to a scalar passed by
4069 value. (adj->by_ref is false and the type of the original
4070 actual argument is a pointer to a scalar).
4072 - A part of an aggregate is passed instead of the whole aggregate.
4073 The part can be passed either by value or by reference, this is
4074 determined by value of adj->by_ref. Moreover, the code below
4075 handles both situations when the original aggregate is passed by
4076 value (its type is not a pointer) and when it is passed by
4077 reference (it is a pointer to an aggregate).
4079 When the new argument is passed by reference (adj->by_ref is true)
4080 it must be a part of an aggregate and therefore we form it by
4081 simply taking the address of a reference inside the original
4082 aggregate. */
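/* A hypothetical instance of the first transformation: for a callee
reduced from "void f (int *p)" to "void f (int v)", the code below
rewrites a call "f (&i)" into roughly "tmp = i; f (tmp);", loading the
pointed-to value at the call site. */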
4084 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
4085 base = gimple_call_arg (stmt, adj->base_index);
4086 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
4087 : EXPR_LOCATION (base);
4089 if (TREE_CODE (base) != ADDR_EXPR
4090 && POINTER_TYPE_P (TREE_TYPE (base)))
4091 off = build_int_cst (adj->alias_ptr_type,
4092 adj->offset / BITS_PER_UNIT);
4093 else
4095 HOST_WIDE_INT base_offset;
4096 tree prev_base;
4097 bool addrof;
4099 if (TREE_CODE (base) == ADDR_EXPR)
4101 base = TREE_OPERAND (base, 0);
4102 addrof = true;
4104 else
4105 addrof = false;
4106 prev_base = base;
4107 base = get_addr_base_and_unit_offset (base, &base_offset);
4108 /* Aggregate arguments can have non-invariant addresses. */
4109 if (!base)
4111 base = build_fold_addr_expr (prev_base);
4112 off = build_int_cst (adj->alias_ptr_type,
4113 adj->offset / BITS_PER_UNIT);
4115 else if (TREE_CODE (base) == MEM_REF)
4117 if (!addrof)
4119 deref_base = true;
4120 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4122 off = build_int_cst (adj->alias_ptr_type,
4123 base_offset
4124 + adj->offset / BITS_PER_UNIT);
4125 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4126 off);
4127 base = TREE_OPERAND (base, 0);
4129 else
4131 off = build_int_cst (adj->alias_ptr_type,
4132 base_offset
4133 + adj->offset / BITS_PER_UNIT);
4134 base = build_fold_addr_expr (base);
4138 if (!adj->by_ref)
4140 tree type = adj->type;
4141 unsigned int align;
4142 unsigned HOST_WIDE_INT misalign;
4144 if (deref_base)
4146 align = deref_align;
4147 misalign = 0;
4149 else
4151 get_pointer_alignment_1 (base, &align, &misalign);
4152 if (TYPE_ALIGN (type) > align)
4153 align = TYPE_ALIGN (type);
4155 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4156 * BITS_PER_UNIT);
4157 misalign = misalign & (align - 1);
4158 if (misalign != 0)
4159 align = (misalign & -misalign);
4160 if (align < TYPE_ALIGN (type))
4161 type = build_aligned_type (type, align);
4162 base = force_gimple_operand_gsi (&gsi, base,
4163 true, NULL, true, GSI_SAME_STMT);
4164 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4165 /* If expr is not a valid gimple call argument, emit
4166 a load into a temporary. */
4167 if (is_gimple_reg_type (TREE_TYPE (expr)))
4169 gimple tem = gimple_build_assign (NULL_TREE, expr);
4170 if (gimple_in_ssa_p (cfun))
4172 gimple_set_vuse (tem, gimple_vuse (stmt));
4173 expr = make_ssa_name (TREE_TYPE (expr), tem);
4175 else
4176 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4177 gimple_assign_set_lhs (tem, expr);
4178 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4181 else
4183 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4184 expr = build_fold_addr_expr (expr);
4185 expr = force_gimple_operand_gsi (&gsi, expr,
4186 true, NULL, true, GSI_SAME_STMT);
4188 vargs.quick_push (expr);
4190 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4192 unsigned int ix;
4193 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4194 gimple def_temp;
4196 arg = gimple_call_arg (stmt, adj->base_index);
4197 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4199 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4200 continue;
4201 arg = fold_convert_loc (gimple_location (stmt),
4202 TREE_TYPE (origin), arg);
4204 if (debug_args == NULL)
4205 debug_args = decl_debug_args_insert (callee_decl);
4206 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4207 if (ddecl == origin)
4209 ddecl = (**debug_args)[ix + 1];
4210 break;
4212 if (ddecl == NULL)
4214 ddecl = make_node (DEBUG_EXPR_DECL);
4215 DECL_ARTIFICIAL (ddecl) = 1;
4216 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4217 DECL_MODE (ddecl) = DECL_MODE (origin);
4219 vec_safe_push (*debug_args, origin);
4220 vec_safe_push (*debug_args, ddecl);
4222 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4223 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4227 if (dump_file && (dump_flags & TDF_DETAILS))
4229 fprintf (dump_file, "replacing stmt:");
4230 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4233 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4234 vargs.release ();
4235 if (gimple_call_lhs (stmt))
4236 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4238 gimple_set_block (new_stmt, gimple_block (stmt));
4239 if (gimple_has_location (stmt))
4240 gimple_set_location (new_stmt, gimple_location (stmt));
4241 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4242 gimple_call_copy_flags (new_stmt, stmt);
4243 if (gimple_in_ssa_p (cfun))
4245 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4246 if (gimple_vdef (stmt))
4248 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4249 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4253 if (dump_file && (dump_flags & TDF_DETAILS))
4255 fprintf (dump_file, "with stmt:");
4256 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4257 fprintf (dump_file, "\n");
4259 gsi_replace (&gsi, new_stmt, true);
4260 if (cs)
4261 cs->set_call_stmt (new_stmt);
4264 current_node->record_stmt_references (gsi_stmt (gsi));
4265 gsi_prev (&gsi);
4267 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4270 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4271 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4272 specifies whether the function should care about type incompatibility between
4273 the current and new expressions. If it is false, the function will leave
4274 incompatibility issues to the caller. Return true iff the expression
4275 was modified. */
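/* For instance (a hypothetical reduction): if parameter "s" of type
"struct S" was replaced by a new scalar parameter standing for "s.x", an
occurrence of "s.x" in the body is rewritten to the new decl, or to a
dereference of it when the reduction is passed by reference. */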
4277 bool
4278 ipa_modify_expr (tree *expr, bool convert,
4279 ipa_parm_adjustment_vec adjustments)
4281 struct ipa_parm_adjustment *cand
4282 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4283 if (!cand)
4284 return false;
4286 tree src;
4287 if (cand->by_ref)
4288 src = build_simple_mem_ref (cand->new_decl);
4289 else
4290 src = cand->new_decl;
4292 if (dump_file && (dump_flags & TDF_DETAILS))
4294 fprintf (dump_file, "About to replace expr ");
4295 print_generic_expr (dump_file, *expr, 0);
4296 fprintf (dump_file, " with ");
4297 print_generic_expr (dump_file, src, 0);
4298 fprintf (dump_file, "\n");
4301 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4303 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4304 *expr = vce;
4306 else
4307 *expr = src;
4308 return true;
4311 /* If T is an SSA_NAME, return NULL if it is not a default def or
4312 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4313 the base variable is always returned, regardless if it is a default
4314 def. Return T if it is not an SSA_NAME. */
4316 static tree
4317 get_ssa_base_param (tree t, bool ignore_default_def)
4319 if (TREE_CODE (t) == SSA_NAME)
4321 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4322 return SSA_NAME_VAR (t);
4323 else
4324 return NULL_TREE;
4326 return t;
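/* E.g. for the default definition "p_1(D)" of parameter "p" this returns
the PARM_DECL "p"; for a non-default SSA name it returns NULL_TREE unless
IGNORE_DEFAULT_DEF is set, in which case the base variable is returned
regardless. */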
4329 /* Given an expression, return an adjustment entry specifying the
4330 transformation to be done on EXPR. If no suitable adjustment entry
4331 was found, returns NULL.
4333 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4334 default def, otherwise bail on them.
4336 If CONVERT is non-NULL, this function will set *CONVERT if the
4337 expression provided is a component reference. ADJUSTMENTS is the
4338 adjustments vector. */
4340 ipa_parm_adjustment *
4341 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4342 ipa_parm_adjustment_vec adjustments,
4343 bool ignore_default_def)
4345 if (TREE_CODE (**expr) == BIT_FIELD_REF
4346 || TREE_CODE (**expr) == IMAGPART_EXPR
4347 || TREE_CODE (**expr) == REALPART_EXPR)
4349 *expr = &TREE_OPERAND (**expr, 0);
4350 if (convert)
4351 *convert = true;
4354 HOST_WIDE_INT offset, size, max_size;
4355 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4356 if (!base || size == -1 || max_size == -1)
4357 return NULL;
4359 if (TREE_CODE (base) == MEM_REF)
4361 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4362 base = TREE_OPERAND (base, 0);
4365 base = get_ssa_base_param (base, ignore_default_def);
4366 if (!base || TREE_CODE (base) != PARM_DECL)
4367 return NULL;
4369 struct ipa_parm_adjustment *cand = NULL;
4370 unsigned int len = adjustments.length ();
4371 for (unsigned i = 0; i < len; i++)
4373 struct ipa_parm_adjustment *adj = &adjustments[i];
4375 if (adj->base == base
4376 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4378 cand = adj;
4379 break;
4383 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4384 return NULL;
4385 return cand;
4388 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4390 static bool
4391 index_in_adjustments_multiple_times_p (int base_index,
4392 ipa_parm_adjustment_vec adjustments)
4394 int i, len = adjustments.length ();
4395 bool one = false;
4397 for (i = 0; i < len; i++)
4399 struct ipa_parm_adjustment *adj;
4400 adj = &adjustments[i];
4402 if (adj->base_index == base_index)
4404 if (one)
4405 return true;
4406 else
4407 one = true;
4410 return false;
4414 /* Return adjustments that should have the same effect on function parameters
4415 and call arguments as if they were first changed according to adjustments in
4416 INNER and then by adjustments in OUTER. */
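/* A hypothetical worked example: let INNER describe three original
parameters as {copy 0, remove 1, copy 2} and let OUTER, acting on the two
surviving parameters, be {remove 0, copy 1}. The combined vector then
removes original parameters 0 and 1 and copies original parameter 2. */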
4418 ipa_parm_adjustment_vec
4419 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4420 ipa_parm_adjustment_vec outer)
4422 int i, outlen = outer.length ();
4423 int inlen = inner.length ();
4424 int removals = 0;
4425 ipa_parm_adjustment_vec adjustments, tmp;
4427 tmp.create (inlen);
4428 for (i = 0; i < inlen; i++)
4430 struct ipa_parm_adjustment *n;
4431 n = &inner[i];
4433 if (n->op == IPA_PARM_OP_REMOVE)
4434 removals++;
4435 else
4437 /* FIXME: Handling of new arguments is not implemented yet. */
4438 gcc_assert (n->op != IPA_PARM_OP_NEW);
4439 tmp.quick_push (*n);
4443 adjustments.create (outlen + removals);
4444 for (i = 0; i < outlen; i++)
4446 struct ipa_parm_adjustment r;
4447 struct ipa_parm_adjustment *out = &outer[i];
4448 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4450 memset (&r, 0, sizeof (r));
4451 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4452 if (out->op == IPA_PARM_OP_REMOVE)
4454 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4456 r.op = IPA_PARM_OP_REMOVE;
4457 adjustments.quick_push (r);
4459 continue;
4461 else
4463 /* FIXME: Handling of new arguments is not implemented yet. */
4464 gcc_assert (out->op != IPA_PARM_OP_NEW);
4467 r.base_index = in->base_index;
4468 r.type = out->type;
4470 /* FIXME: Create nonlocal value too. */
4472 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4473 r.op = IPA_PARM_OP_COPY;
4474 else if (in->op == IPA_PARM_OP_COPY)
4475 r.offset = out->offset;
4476 else if (out->op == IPA_PARM_OP_COPY)
4477 r.offset = in->offset;
4478 else
4479 r.offset = in->offset + out->offset;
4480 adjustments.quick_push (r);
4483 for (i = 0; i < inlen; i++)
4485 struct ipa_parm_adjustment *n = &inner[i];
4487 if (n->op == IPA_PARM_OP_REMOVE)
4488 adjustments.quick_push (*n);
4491 tmp.release ();
4492 return adjustments;
4495 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4496 way, assuming they are meant to be applied to FNDECL. */
4498 void
4499 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4500 tree fndecl)
4502 int i, len = adjustments.length ();
4503 bool first = true;
4504 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4506 fprintf (file, "IPA param adjustments: ");
4507 for (i = 0; i < len; i++)
4509 struct ipa_parm_adjustment *adj;
4510 adj = &adjustments[i];
4512 if (!first)
4513 fprintf (file, " ");
4514 else
4515 first = false;
4517 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4518 print_generic_expr (file, parms[adj->base_index], 0);
4519 if (adj->base)
4521 fprintf (file, ", base: ");
4522 print_generic_expr (file, adj->base, 0);
4524 if (adj->new_decl)
4526 fprintf (file, ", new_decl: ");
4527 print_generic_expr (file, adj->new_decl, 0);
4529 if (adj->new_ssa_base)
4531 fprintf (file, ", new_ssa_base: ");
4532 print_generic_expr (file, adj->new_ssa_base, 0);
4535 if (adj->op == IPA_PARM_OP_COPY)
4536 fprintf (file, ", copy_param");
4537 else if (adj->op == IPA_PARM_OP_REMOVE)
4538 fprintf (file, ", remove_param");
4539 else
4540 fprintf (file, ", offset %li", (long) adj->offset);
4541 if (adj->by_ref)
4542 fprintf (file, ", by_ref");
4543 print_node_brief (file, ", type: ", adj->type, 0);
4544 fprintf (file, "\n");
4546 parms.release ();
4549 /* Dump the linked list of aggregate replacement values AV to F. */
4551 void
4552 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4554 bool comma = false;
4555 fprintf (f, " Aggregate replacements:");
4556 for (; av; av = av->next)
4558 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4559 av->index, av->offset);
4560 print_generic_expr (f, av->value, 0);
4561 comma = true;
4563 fprintf (f, "\n");
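/* With the format strings above, the dump looks e.g. like (hypothetical
values):
Aggregate replacements: 0[32]=4, 1[0]=7
meaning parameter 0 holds constant 4 at bit offset 32 and parameter 1
holds constant 7 at offset 0. */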
4566 /* Stream out jump function JUMP_FUNC to OB. */
4568 static void
4569 ipa_write_jump_function (struct output_block *ob,
4570 struct ipa_jump_func *jump_func)
4572 struct ipa_agg_jf_item *item;
4573 struct bitpack_d bp;
4574 int i, count;
4576 streamer_write_uhwi (ob, jump_func->type);
4577 switch (jump_func->type)
4579 case IPA_JF_UNKNOWN:
4580 break;
4581 case IPA_JF_KNOWN_TYPE:
4582 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4583 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4584 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4585 break;
4586 case IPA_JF_CONST:
4587 gcc_assert (
4588 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4589 stream_write_tree (ob, jump_func->value.constant.value, true);
4590 break;
4591 case IPA_JF_PASS_THROUGH:
4592 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4593 if (jump_func->value.pass_through.operation == NOP_EXPR)
4595 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4596 bp = bitpack_create (ob->main_stream);
4597 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4598 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4599 streamer_write_bitpack (&bp);
4601 else
4603 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4604 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4606 break;
4607 case IPA_JF_ANCESTOR:
4608 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4609 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4610 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4611 bp = bitpack_create (ob->main_stream);
4612 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4613 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4614 streamer_write_bitpack (&bp);
4615 break;
4618 count = vec_safe_length (jump_func->agg.items);
4619 streamer_write_uhwi (ob, count);
4620 if (count)
4622 bp = bitpack_create (ob->main_stream);
4623 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4624 streamer_write_bitpack (&bp);
4627 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4629 streamer_write_uhwi (ob, item->offset);
4630 stream_write_tree (ob, item->value, true);
4634 /* Read in jump function JUMP_FUNC from IB. */
4636 static void
4637 ipa_read_jump_function (struct lto_input_block *ib,
4638 struct ipa_jump_func *jump_func,
4639 struct cgraph_edge *cs,
4640 struct data_in *data_in)
4642 enum jump_func_type jftype;
4643 enum tree_code operation;
4644 int i, count;
4646 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4647 switch (jftype)
4649 case IPA_JF_UNKNOWN:
4650 jump_func->type = IPA_JF_UNKNOWN;
4651 break;
4652 case IPA_JF_KNOWN_TYPE:
4654 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4655 tree base_type = stream_read_tree (ib, data_in);
4656 tree component_type = stream_read_tree (ib, data_in);
4658 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4659 break;
4661 case IPA_JF_CONST:
4662 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4663 break;
4664 case IPA_JF_PASS_THROUGH:
4665 operation = (enum tree_code) streamer_read_uhwi (ib);
4666 if (operation == NOP_EXPR)
4668 int formal_id = streamer_read_uhwi (ib);
4669 struct bitpack_d bp = streamer_read_bitpack (ib);
4670 bool agg_preserved = bp_unpack_value (&bp, 1);
4671 bool type_preserved = bp_unpack_value (&bp, 1);
4672 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4673 type_preserved);
4675 else
4677 tree operand = stream_read_tree (ib, data_in);
4678 int formal_id = streamer_read_uhwi (ib);
4679 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4680 operation);
4682 break;
4683 case IPA_JF_ANCESTOR:
4685 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4686 tree type = stream_read_tree (ib, data_in);
4687 int formal_id = streamer_read_uhwi (ib);
4688 struct bitpack_d bp = streamer_read_bitpack (ib);
4689 bool agg_preserved = bp_unpack_value (&bp, 1);
4690 bool type_preserved = bp_unpack_value (&bp, 1);
4692 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4693 type_preserved);
4694 break;
4698 count = streamer_read_uhwi (ib);
4699 vec_alloc (jump_func->agg.items, count);
4700 if (count)
4702 struct bitpack_d bp = streamer_read_bitpack (ib);
4703 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4705 for (i = 0; i < count; i++)
4707 struct ipa_agg_jf_item item;
4708 item.offset = streamer_read_uhwi (ib);
4709 item.value = stream_read_tree (ib, data_in);
4710 jump_func->agg.items->quick_push (item);
4714 /* Stream out to OB the parts of cgraph_indirect_call_info of CS that are
4715 relevant to indirect inlining. */
4717 static void
4718 ipa_write_indirect_edge_info (struct output_block *ob,
4719 struct cgraph_edge *cs)
4721 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4722 struct bitpack_d bp;
4724 streamer_write_hwi (ob, ii->param_index);
4725 streamer_write_hwi (ob, ii->offset);
4726 bp = bitpack_create (ob->main_stream);
4727 bp_pack_value (&bp, ii->polymorphic, 1);
4728 bp_pack_value (&bp, ii->agg_contents, 1);
4729 bp_pack_value (&bp, ii->member_ptr, 1);
4730 bp_pack_value (&bp, ii->by_ref, 1);
4731 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4732 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4733 bp_pack_value (&bp, ii->speculative_maybe_derived_type, 1);
4734 streamer_write_bitpack (&bp);
4736 if (ii->polymorphic)
4738 streamer_write_hwi (ob, ii->otr_token);
4739 stream_write_tree (ob, ii->otr_type, true);
4740 stream_write_tree (ob, ii->outer_type, true);
4741 stream_write_tree (ob, ii->speculative_outer_type, true);
4742 if (ii->speculative_outer_type)
4743 streamer_write_hwi (ob, ii->speculative_offset);
4747 /* Read in from IB the parts of cgraph_indirect_call_info of CS that are
4748 relevant to indirect inlining. */
4750 static void
4751 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4752 struct data_in *data_in ATTRIBUTE_UNUSED,
4753 struct cgraph_edge *cs)
4755 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4756 struct bitpack_d bp;
4758 ii->param_index = (int) streamer_read_hwi (ib);
4759 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4760 bp = streamer_read_bitpack (ib);
4761 ii->polymorphic = bp_unpack_value (&bp, 1);
4762 ii->agg_contents = bp_unpack_value (&bp, 1);
4763 ii->member_ptr = bp_unpack_value (&bp, 1);
4764 ii->by_ref = bp_unpack_value (&bp, 1);
4765 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4766 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4767 ii->speculative_maybe_derived_type = bp_unpack_value (&bp, 1);
4768 if (ii->polymorphic)
4770 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4771 ii->otr_type = stream_read_tree (ib, data_in);
4772 ii->outer_type = stream_read_tree (ib, data_in);
4773 ii->speculative_outer_type = stream_read_tree (ib, data_in);
4774 if (ii->speculative_outer_type)
4775 ii->speculative_offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4779 /* Stream out NODE info to OB. */
4781 static void
4782 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4784 int node_ref;
4785 lto_symtab_encoder_t encoder;
4786 struct ipa_node_params *info = IPA_NODE_REF (node);
4787 int j;
4788 struct cgraph_edge *e;
4789 struct bitpack_d bp;
4791 encoder = ob->decl_state->symtab_node_encoder;
4792 node_ref = lto_symtab_encoder_encode (encoder, node);
4793 streamer_write_uhwi (ob, node_ref);
4795 streamer_write_uhwi (ob, ipa_get_param_count (info));
4796 for (j = 0; j < ipa_get_param_count (info); j++)
4797 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4798 bp = bitpack_create (ob->main_stream);
4799 gcc_assert (info->analysis_done
4800 || ipa_get_param_count (info) == 0);
4801 gcc_assert (!info->node_enqueued);
4802 gcc_assert (!info->ipcp_orig_node);
4803 for (j = 0; j < ipa_get_param_count (info); j++)
4804 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4805 streamer_write_bitpack (&bp);
4806 for (j = 0; j < ipa_get_param_count (info); j++)
4807 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4808 for (e = node->callees; e; e = e->next_callee)
4810 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4812 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4813 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4814 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4816 for (e = node->indirect_calls; e; e = e->next_callee)
4818 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4820 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4821 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4822 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4823 ipa_write_indirect_edge_info (ob, e);
4827 /* Stream in NODE info from IB. */
4829 static void
4830 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4831 struct data_in *data_in)
4833 struct ipa_node_params *info = IPA_NODE_REF (node);
4834 int k;
4835 struct cgraph_edge *e;
4836 struct bitpack_d bp;
4838 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4840 for (k = 0; k < ipa_get_param_count (info); k++)
4841 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4843 bp = streamer_read_bitpack (ib);
4844 if (ipa_get_param_count (info) != 0)
4845 info->analysis_done = true;
4846 info->node_enqueued = false;
4847 for (k = 0; k < ipa_get_param_count (info); k++)
4848 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4849 for (k = 0; k < ipa_get_param_count (info); k++)
4850 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4851 for (e = node->callees; e; e = e->next_callee)
4853 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4854 int count = streamer_read_uhwi (ib);
4856 if (!count)
4857 continue;
4858 vec_safe_grow_cleared (args->jump_functions, count);
4860 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4861 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4862 data_in);
4864 for (e = node->indirect_calls; e; e = e->next_callee)
4866 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4867 int count = streamer_read_uhwi (ib);
4869 if (count)
4871 vec_safe_grow_cleared (args->jump_functions, count);
4872 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4873 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4874 data_in);
4876 ipa_read_indirect_edge_info (ib, data_in, e);
4880 /* Write jump functions for all functions in the current partition. */
4882 void
4883 ipa_prop_write_jump_functions (void)
4885 struct cgraph_node *node;
4886 struct output_block *ob;
4887 unsigned int count = 0;
4888 lto_symtab_encoder_iterator lsei;
4889 lto_symtab_encoder_t encoder;
4892 if (!ipa_node_params_vector.exists ())
4893 return;
4895 ob = create_output_block (LTO_section_jump_functions);
4896 encoder = ob->decl_state->symtab_node_encoder;
4897 ob->symbol = NULL;
4898 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4899 lsei_next_function_in_partition (&lsei))
4901 node = lsei_cgraph_node (lsei);
4902 if (node->has_gimple_body_p ()
4903 && IPA_NODE_REF (node) != NULL)
4904 count++;
4907 streamer_write_uhwi (ob, count);
4909 /* Process all of the functions. */
4910 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4911 lsei_next_function_in_partition (&lsei))
4913 node = lsei_cgraph_node (lsei);
4914 if (node->has_gimple_body_p ()
4915 && IPA_NODE_REF (node) != NULL)
4916 ipa_write_node_info (ob, node);
4918 streamer_write_char_stream (ob->main_stream, 0);
4919 produce_asm (ob, NULL);
4920 destroy_output_block (ob);
4923 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4925 static void
4926 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4927 size_t len)
4929 const struct lto_function_header *header =
4930 (const struct lto_function_header *) data;
4931 const int cfg_offset = sizeof (struct lto_function_header);
4932 const int main_offset = cfg_offset + header->cfg_size;
4933 const int string_offset = main_offset + header->main_size;
4934 struct data_in *data_in;
4935 unsigned int i;
4936 unsigned int count;
4938 lto_input_block ib_main ((const char *) data + main_offset,
4939 header->main_size);
4941 data_in =
4942 lto_data_in_create (file_data, (const char *) data + string_offset,
4943 header->string_size, vNULL);
4944 count = streamer_read_uhwi (&ib_main);
4946 for (i = 0; i < count; i++)
4948 unsigned int index;
4949 struct cgraph_node *node;
4950 lto_symtab_encoder_t encoder;
4952 index = streamer_read_uhwi (&ib_main);
4953 encoder = file_data->symtab_node_encoder;
4954 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4955 index));
4956 gcc_assert (node->definition);
4957 ipa_read_node_info (&ib_main, node, data_in);
4959 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4960 len);
4961 lto_data_in_delete (data_in);
4964 /* Read IPA-CP jump functions. */
4966 void
4967 ipa_prop_read_jump_functions (void)
4969 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4970 struct lto_file_decl_data *file_data;
4971 unsigned int j = 0;
4973 ipa_check_create_node_params ();
4974 ipa_check_create_edge_args ();
4975 ipa_register_cgraph_hooks ();
4977 while ((file_data = file_data_vec[j++]))
4979 size_t len;
4980 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4982 if (data)
4983 ipa_prop_read_section (file_data, data, len);
4987 /* After merging units, we can get a mismatch in argument counts.
4988 Also, decl merging might have rendered parameter lists obsolete.
4989 Also compute called_with_variable_arg info. */
4991 void
4992 ipa_update_after_lto_read (void)
4994 ipa_check_create_node_params ();
4995 ipa_check_create_edge_args ();
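/* Stream out the aggregate value replacement chain for NODE to OB. */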
4998 void
4999 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
5001 int node_ref;
5002 unsigned int count = 0;
5003 lto_symtab_encoder_t encoder;
5004 struct ipa_agg_replacement_value *aggvals, *av;
5006 aggvals = ipa_get_agg_replacements_for_node (node);
5007 encoder = ob->decl_state->symtab_node_encoder;
5008 node_ref = lto_symtab_encoder_encode (encoder, node);
5009 streamer_write_uhwi (ob, node_ref);
5011 for (av = aggvals; av; av = av->next)
5012 count++;
5013 streamer_write_uhwi (ob, count);
5015 for (av = aggvals; av; av = av->next)
5017 struct bitpack_d bp;
5019 streamer_write_uhwi (ob, av->offset);
5020 streamer_write_uhwi (ob, av->index);
5021 stream_write_tree (ob, av->value, true);
5023 bp = bitpack_create (ob->main_stream);
5024 bp_pack_value (&bp, av->by_ref, 1);
5025 streamer_write_bitpack (&bp);
5029 /* Stream in the aggregate value replacement chain for NODE from IB. */
5031 static void
5032 read_agg_replacement_chain (struct lto_input_block *ib,
5033 struct cgraph_node *node,
5034 struct data_in *data_in)
5036 struct ipa_agg_replacement_value *aggvals = NULL;
5037 unsigned int count, i;
5039 count = streamer_read_uhwi (ib);
5040 for (i = 0; i < count; i++)
5042 struct ipa_agg_replacement_value *av;
5043 struct bitpack_d bp;
5045 av = ggc_alloc<ipa_agg_replacement_value> ();
5046 av->offset = streamer_read_uhwi (ib);
5047 av->index = streamer_read_uhwi (ib);
5048 av->value = stream_read_tree (ib, data_in);
5049 bp = streamer_read_bitpack (ib);
5050 av->by_ref = bp_unpack_value (&bp, 1);
5051 av->next = aggvals;
5052 aggvals = av;
5054 ipa_set_node_agg_value_chain (node, aggvals);
5057 /* Write all aggregate replacements for functions in the current partition. */
5059 void
5060 ipa_prop_write_all_agg_replacement (void)
5062 struct cgraph_node *node;
5063 struct output_block *ob;
5064 unsigned int count = 0;
5065 lto_symtab_encoder_iterator lsei;
5066 lto_symtab_encoder_t encoder;
5068 if (!ipa_node_agg_replacements)
5069 return;
5071 ob = create_output_block (LTO_section_ipcp_transform);
5072 encoder = ob->decl_state->symtab_node_encoder;
5073 ob->symbol = NULL;
5074 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5075 lsei_next_function_in_partition (&lsei))
5077 node = lsei_cgraph_node (lsei);
5078 if (node->has_gimple_body_p ()
5079 && ipa_get_agg_replacements_for_node (node) != NULL)
5080 count++;
5083 streamer_write_uhwi (ob, count);
5085 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5086 lsei_next_function_in_partition (&lsei))
5088 node = lsei_cgraph_node (lsei);
5089 if (node->has_gimple_body_p ()
5090 && ipa_get_agg_replacements_for_node (node) != NULL)
5091 write_agg_replacement_chain (ob, node);
5093 streamer_write_char_stream (ob->main_stream, 0);
5094 produce_asm (ob, NULL);
5095 destroy_output_block (ob);
5098 /* Read the replacements section in file FILE_DATA of length LEN with data
5099 DATA. */
5101 static void
5102 read_replacements_section (struct lto_file_decl_data *file_data,
5103 const char *data,
5104 size_t len)
5106 const struct lto_function_header *header =
5107 (const struct lto_function_header *) data;
5108 const int cfg_offset = sizeof (struct lto_function_header);
5109 const int main_offset = cfg_offset + header->cfg_size;
5110 const int string_offset = main_offset + header->main_size;
5111 struct data_in *data_in;
5112 unsigned int i;
5113 unsigned int count;
5115 lto_input_block ib_main ((const char *) data + main_offset,
5116 header->main_size);
5118 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5119 header->string_size, vNULL);
5120 count = streamer_read_uhwi (&ib_main);
5122 for (i = 0; i < count; i++)
5124 unsigned int index;
5125 struct cgraph_node *node;
5126 lto_symtab_encoder_t encoder;
5128 index = streamer_read_uhwi (&ib_main);
5129 encoder = file_data->symtab_node_encoder;
5130 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5131 index));
5132 gcc_assert (node->definition);
5133 read_agg_replacement_chain (&ib_main, node, data_in);
5135 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5136 len);
5137 lto_data_in_delete (data_in);
5140 /* Read IPA-CP aggregate replacements. */
5142 void
5143 ipa_prop_read_all_agg_replacement (void)
5145 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5146 struct lto_file_decl_data *file_data;
5147 unsigned int j = 0;
5149 while ((file_data = file_data_vec[j++]))
5151 size_t len;
5152 const char *data = lto_get_section_data (file_data,
5153 LTO_section_ipcp_transform,
5154 NULL, &len);
5155 if (data)
5156 read_replacements_section (file_data, data, len);
5160 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5161 NODE. */
5163 static void
5164 adjust_agg_replacement_values (struct cgraph_node *node,
5165 struct ipa_agg_replacement_value *aggval)
5167 struct ipa_agg_replacement_value *v;
5168 int i, c = 0, d = 0, *adj;
5170 if (!node->clone.combined_args_to_skip)
5171 return;
5173 for (v = aggval; v; v = v->next)
5175 gcc_assert (v->index >= 0);
5176 if (c < v->index)
5177 c = v->index;
5179 c++;
5181 adj = XALLOCAVEC (int, c);
5182 for (i = 0; i < c; i++)
5183 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5185 adj[i] = -1;
5186 d++;
5188 else
5189 adj[i] = i - d;
5191 for (v = aggval; v; v = v->next)
5192 v->index = adj[v->index];
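/* E.g. (hypothetical): with combined_args_to_skip = {1} and replacement
indices recorded up to original index 3, the map ADJ becomes
{0, -1, 1, 2}, so a replacement recorded for original parameter 2 is
re-indexed to 1. */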
5195 /* Dominator walker driving the ipcp modification phase. */
5197 class ipcp_modif_dom_walker : public dom_walker
5199 public:
5200 ipcp_modif_dom_walker (struct func_body_info *fbi,
5201 vec<ipa_param_descriptor> descs,
5202 struct ipa_agg_replacement_value *av,
5203 bool *sc, bool *cc)
5204 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5205 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5207 virtual void before_dom_children (basic_block);
5209 private:
5210 struct func_body_info *m_fbi;
5211 vec<ipa_param_descriptor> m_descriptors;
5212 struct ipa_agg_replacement_value *m_aggval;
5213 bool *m_something_changed, *m_cfg_changed;
5216 void
5217 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5219 gimple_stmt_iterator gsi;
5220 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5222 struct ipa_agg_replacement_value *v;
5223 gimple stmt = gsi_stmt (gsi);
5224 tree rhs, val, t;
5225 HOST_WIDE_INT offset, size;
5226 int index;
5227 bool by_ref, vce;
5229 if (!gimple_assign_load_p (stmt))
5230 continue;
5231 rhs = gimple_assign_rhs1 (stmt);
5232 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5233 continue;
5235 vce = false;
5236 t = rhs;
5237 while (handled_component_p (t))
5239 /* V_C_E can do things like convert an array of integers to one
5240 bigger integer and similar things we do not handle below. */
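/* E.g. a load VIEW_CONVERT_EXPR<long>(arr), with arr an int[2],
reinterprets the whole aggregate and cannot be matched against a single
aggregate replacement value. */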
5241 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5243 vce = true;
5244 break;
5246 t = TREE_OPERAND (t, 0);
5248 if (vce)
5249 continue;
5251 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5252 &offset, &size, &by_ref))
5253 continue;
5254 for (v = m_aggval; v; v = v->next)
5255 if (v->index == index
5256 && v->offset == offset)
5257 break;
5258 if (!v
5259 || v->by_ref != by_ref
5260 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5261 continue;
5263 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5264 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5266 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5267 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5268 else if (TYPE_SIZE (TREE_TYPE (rhs))
5269 == TYPE_SIZE (TREE_TYPE (v->value)))
5270 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5271 else
5273 if (dump_file)
5275 fprintf (dump_file, " const ");
5276 print_generic_expr (dump_file, v->value, 0);
5277 fprintf (dump_file, " can't be converted to type of ");
5278 print_generic_expr (dump_file, rhs, 0);
5279 fprintf (dump_file, "\n");
5281 continue;
5284 else
5285 val = v->value;
5287 if (dump_file && (dump_flags & TDF_DETAILS))
5289 fprintf (dump_file, "Modifying stmt:\n ");
5290 print_gimple_stmt (dump_file, stmt, 0, 0);
5292 gimple_assign_set_rhs_from_tree (&gsi, val);
5293 update_stmt (stmt);
5295 if (dump_file && (dump_flags & TDF_DETAILS))
5297 fprintf (dump_file, "into:\n ");
5298 print_gimple_stmt (dump_file, stmt, 0, 0);
5299 fprintf (dump_file, "\n");
5302 *m_something_changed = true;
5303 if (maybe_clean_eh_stmt (stmt)
5304 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5305 *m_cfg_changed = true;
5310 /* IPCP transformation phase applying aggregate value replacements to the body of NODE. */
5312 unsigned int
5313 ipcp_transform_function (struct cgraph_node *node)
5315 vec<ipa_param_descriptor> descriptors = vNULL;
5316 struct func_body_info fbi;
5317 struct ipa_agg_replacement_value *aggval;
5318 int param_count;
5319 bool cfg_changed = false, something_changed = false;
5321 gcc_checking_assert (cfun);
5322 gcc_checking_assert (current_function_decl);
5324 if (dump_file)
5325 fprintf (dump_file, "Modification phase of node %s/%i\n",
5326 node->name (), node->order);
5328 aggval = ipa_get_agg_replacements_for_node (node);
5329 if (!aggval)
5330 return 0;
5331 param_count = count_formal_params (node->decl);
5332 if (param_count == 0)
5333 return 0;
5334 adjust_agg_replacement_values (node, aggval);
5335 if (dump_file)
5336 ipa_dump_agg_replacement_values (dump_file, aggval);
5338 fbi.node = node;
5339 fbi.info = NULL;
5340 fbi.bb_infos = vNULL;
5341 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5342 fbi.param_count = param_count;
5343 fbi.aa_walked = 0;
5345 descriptors.safe_grow_cleared (param_count);
5346 ipa_populate_param_decls (node, descriptors);
5347 calculate_dominance_info (CDI_DOMINATORS);
5348 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5349 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5351 int i;
5352 struct ipa_bb_info *bi;
5353 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5354 free_ipa_bb_info (bi);
5355 fbi.bb_infos.release ();
5356 free_dominance_info (CDI_DOMINATORS);
5357 (*ipa_node_agg_replacements)[node->uid] = NULL;
5358 descriptors.release ();
5360 if (!something_changed)
5361 return 0;
5362 else if (cfg_changed)
5363 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5364 else
5365 return TODO_update_ssa_only_virtuals;