/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge_p> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}
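
/* Illustrative example (not part of the original sources): given

     int __attribute__ ((optimize ("O0")))
     keep_me (int i)
     {
       return i;
     }

   the optimization node attached to keep_me has x_optimize == 0, so the
   predicate above returns true and IPA-CP leaves the function alone.  The
   same would happen with optimize ("no-ipa-cp").  */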

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a textual representation of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset " HOST_WIDE_INT_PRINT_DEC ", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC ", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
    }
}
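
/* For illustration (not part of the original sources), the output produced by
   the function above for a call passing a constant scalar and a by-reference
   aggregate with one known field might look like:

       param 0: CONST: 4
       param 1: UNKNOWN
         Aggregate passed by reference:
           offset: 0, cst: 7

   modulo exact dump formatting.  */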

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
	      && TYPE_BINFO (component_type));
  if (!flag_devirtualize)
    return;
  gcc_assert (BINFO_VTABLE (TYPE_BINFO (component_type)));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}
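
/* As an illustrative sketch (not from the original sources): for a C++ call
   such as

     struct A { virtual void f (); };
     void consume (A *a);
     void caller () { A a; consume (&a); }

   the argument can be described by a known type jump function with offset 0,
   base_type A and component_type A, because the dynamic type of the object
   that &a points to is known at the call site.  */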

/* Set DST to be a copy of another constant jump function SRC (to be used by
   jump function combination code).  The two functions will share their
   rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  gcc_assert (!type_preserved
	      || (TREE_CODE (type) == RECORD_TYPE
		  && TYPE_BINFO (type)
		  && BINFO_VTABLE (TYPE_BINFO (type))));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  /* TODO: Skip clobbers, doing so triggers problem in PR60306.  */
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI and the type associated with the new
   table can be identified, return the type.  Otherwise return NULL_TREE.  */

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base, binfo;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
	  || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
	  || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
				  TREE_OPERAND (base, 1)))
	return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  binfo = vtable_pointer_value_to_binfo (rhs);

  /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
     base of outer type.  In this case we would need to either
     work on binfos or translate it back to outer type and offset.
     KNOWN_TYPE jump functions are not ready for that, yet.  */
  if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
    return NULL_TREE;

  return BINFO_TYPE (binfo);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return true;

  /* C++ methods are not allowed to change THIS pointer unless they
     are constructors or destructors.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
      && TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0))) == PARM_DECL
      && TREE_CODE (TREE_TYPE (current_function_decl)) == METHOD_TYPE
      && !DECL_CXX_CONSTRUCTOR_P (current_function_decl)
      && !DECL_CXX_DESTRUCTOR_P (current_function_decl)
      && (SSA_NAME_VAR (TREE_OPERAND (base, 0))
	  == DECL_ARGUMENTS (current_function_decl)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}
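
/* An illustrative example of the kind of type change this machinery looks for
   (not part of the original sources):

     void build (A *a)
     {
       a->~A ();
       new (a) B;	// overwrites the vtable pointer of *a
       a->foo ();
     }

   Walking the virtual definitions backwards from the call to foo finds the
   store of B's vtable pointer, so the dynamic type of *a at the call can no
   longer be assumed to be the declared type A.  */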

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
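
/* A small example of the dominator-based reuse implemented above (an
   illustration, not part of the original sources): in a CFG fragment

       bb2 -> bb3 -> bb4
	       \---> bb5

   where bb3 dominates bb4 and bb5, once the status of a parameter has been
   computed and marked valid in bb3, queries for bb4 and bb5 start from a copy
   of bb3's status instead of re-running the alias walks from scratch.  */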

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
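
/* For illustration (not from the original sources), the kind of statement the
   function above recognizes is

     a.0_2 = a;

   where A is a PARM_DECL that is not a gimple register.  If alias analysis
   confirms that A has not been stored to before this load, the function
   returns A's index among the formal parameters.  */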

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, but without the func_body_info pointer,
   for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and look like:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters of the function that
   is being analyzed.  FBI contains the intermediate information that is only
   needed during the analysis of the function body.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
					 call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

     iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
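
/* Illustratively (not part of the original sources), the layout this predicate
   accepts corresponds to the common C++ member pointer representation

     struct
     {
       void (*__pfn) ();	// method pointer or vtable offset
       long __delta;		// this-pointer adjustment
     };

   i.e. exactly two fields: a method pointer followed by an integer.  */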

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
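
/* For example (illustration only, not from the original sources), given the
   gimple sequence

     b_3 = 5;
     a_2 = b_3;

   calling the function above on a_2 follows the single-rhs copy assignments
   and returns the constant 5.  */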

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known not to be
     constant.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such an
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
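
/* An illustrative source-level example of what the previous function detects
   (not part of the original sources):

     struct S { int a, b; };
     void callee (struct S *p);
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   Walking backwards from the call, both stores are recognized, so the
   resulting jump function describes an aggregate passed by reference with
   constants 1 and 2 at the offsets of fields a and b.  */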

/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
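
/* For illustration (not from the original sources): for a K&R-style callee

     int knr (a) int a; { return a; }

   TYPE_ARG_TYPES of the function type is empty, so the first loop above finds
   nothing and the type of parameter 0 is instead recovered from the
   DECL_ARGUMENTS chain of the callee's declaration.  */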

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg,
						      TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  cgraph_function_or_thunk_node (callee, NULL);
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
1875 /* If STMT looks like a statement loading a value from a member pointer formal
1876 parameter, return that parameter and store the offset of the field to
1877 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1878 might be clobbered). If USE_DELTA, then we look for a use of the delta
1879 field rather than the pfn. */
1881 static tree
1882 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1883 HOST_WIDE_INT *offset_p)
1885 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1887 if (!gimple_assign_single_p (stmt))
1888 return NULL_TREE;
1890 rhs = gimple_assign_rhs1 (stmt);
1891 if (TREE_CODE (rhs) == COMPONENT_REF)
1893 ref_field = TREE_OPERAND (rhs, 1);
1894 rhs = TREE_OPERAND (rhs, 0);
1896 else
1897 ref_field = NULL_TREE;
1898 if (TREE_CODE (rhs) != MEM_REF)
1899 return NULL_TREE;
1900 rec = TREE_OPERAND (rhs, 0);
1901 if (TREE_CODE (rec) != ADDR_EXPR)
1902 return NULL_TREE;
1903 rec = TREE_OPERAND (rec, 0);
1904 if (TREE_CODE (rec) != PARM_DECL
1905 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1906 return NULL_TREE;
1907 ref_offset = TREE_OPERAND (rhs, 1);
1909 if (use_delta)
1910 fld = delta_field;
1911 else
1912 fld = ptr_field;
1913 if (offset_p)
1914 *offset_p = int_bit_position (fld);
1916 if (ref_field)
1918 if (integer_nonzerop (ref_offset))
1919 return NULL_TREE;
1920 return ref_field == fld ? rec : NULL_TREE;
1922 else
1923 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1924 : NULL_TREE;
1927 /* Returns true iff T is an SSA_NAME defined by a statement. */
1929 static bool
1930 ipa_is_ssa_with_stmt_def (tree t)
1932 if (TREE_CODE (t) == SSA_NAME
1933 && !SSA_NAME_IS_DEFAULT_DEF (t))
1934 return true;
1935 else
1936 return false;
1939 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1940 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1941 indirect call graph edge. */
1943 static struct cgraph_edge *
1944 ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
1946 struct cgraph_edge *cs;
1948 cs = cgraph_edge (node, stmt);
1949 cs->indirect_info->param_index = param_index;
1950 cs->indirect_info->agg_contents = 0;
1951 cs->indirect_info->member_ptr = 0;
1952 return cs;
1955 /* Analyze the CALL and examine uses of formal parameters of the caller
1956 FBI->node (described by FBI->info). FBI also holds intermediate
1957 information about each formal parameter. Currently the function checks
1958 whether the call calls a pointer that is a formal parameter and if so, the
1959 parameter is marked with the called flag and an indirect call graph edge
1960 describing the call is created. This is very simple for ordinary pointers
1961 represented in SSA but not-so-nice when it comes to member pointers. The
1962 ugly part of this function does nothing more than trying to match the
1963 pattern of such a call. An example of such a pattern is the gimple dump
1964 below, the call is on the last line:
1966 <bb 2>:
1967 f$__delta_5 = f.__delta;
1968 f$__pfn_24 = f.__pfn;
1970 or
1971 <bb 2>:
1972 f$__delta_5 = MEM[(struct *)&f];
1973 f$__pfn_24 = MEM[(struct *)&f + 4B];
1975 and a few lines below:
1977 <bb 5>
1978 D.2496_3 = (int) f$__pfn_24;
1979 D.2497_4 = D.2496_3 & 1;
1980 if (D.2497_4 != 0)
1981 goto <bb 3>;
1982 else
1983 goto <bb 4>;
1985 <bb 6>:
1986 D.2500_7 = (unsigned int) f$__delta_5;
1987 D.2501_8 = &S + D.2500_7;
1988 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1989 D.2503_10 = *D.2502_9;
1990 D.2504_12 = f$__pfn_24 + -1;
1991 D.2505_13 = (unsigned int) D.2504_12;
1992 D.2506_14 = D.2503_10 + D.2505_13;
1993 D.2507_15 = *D.2506_14;
1994 iftmp.11_16 = (String:: *) D.2507_15;
1996 <bb 7>:
1997 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1998 D.2500_19 = (unsigned int) f$__delta_5;
1999 D.2508_20 = &S + D.2500_19;
2000 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2002 Such patterns are results of simple calls to a member pointer:
2004 int doprinting (int (MyString::* f)(int) const)
2006 MyString S ("somestring");
2008 return (S.*f)(4);
2011 Moreover, the function also looks for called pointers loaded from aggregates
2012 passed by value or reference. */
2014 static void
2015 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
2016 tree target)
2018 struct ipa_node_params *info = fbi->info;
2019 HOST_WIDE_INT offset;
2020 bool by_ref;
2022 if (SSA_NAME_IS_DEFAULT_DEF (target))
2024 tree var = SSA_NAME_VAR (target);
2025 int index = ipa_get_param_decl_index (info, var);
2026 if (index >= 0)
2027 ipa_note_param_call (fbi->node, index, call);
2028 return;
2031 int index;
2032 gimple def = SSA_NAME_DEF_STMT (target);
2033 if (gimple_assign_single_p (def)
2034 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2035 gimple_assign_rhs1 (def), &index, &offset,
2036 NULL, &by_ref))
2038 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2039 if (cs->indirect_info->offset != offset)
2040 cs->indirect_info->outer_type = NULL;
2041 cs->indirect_info->offset = offset;
2042 cs->indirect_info->agg_contents = 1;
2043 cs->indirect_info->by_ref = by_ref;
2044 return;
2047 /* Now we need to try to match the complex pattern of calling a member
2048 pointer. */
2049 if (gimple_code (def) != GIMPLE_PHI
2050 || gimple_phi_num_args (def) != 2
2051 || !POINTER_TYPE_P (TREE_TYPE (target))
2052 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2053 return;
2055 /* First, we need to check whether one of these is a load from a member
2056 pointer that is a parameter to this function. */
2057 tree n1 = PHI_ARG_DEF (def, 0);
2058 tree n2 = PHI_ARG_DEF (def, 1);
2059 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2060 return;
2061 gimple d1 = SSA_NAME_DEF_STMT (n1);
2062 gimple d2 = SSA_NAME_DEF_STMT (n2);
2064 tree rec;
2065 basic_block bb, virt_bb;
2066 basic_block join = gimple_bb (def);
2067 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2069 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2070 return;
2072 bb = EDGE_PRED (join, 0)->src;
2073 virt_bb = gimple_bb (d2);
2075 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2077 bb = EDGE_PRED (join, 1)->src;
2078 virt_bb = gimple_bb (d1);
2080 else
2081 return;
2083 /* Second, we need to check that the basic blocks are laid out in the way
2084 corresponding to the pattern. */
2086 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2087 || single_pred (virt_bb) != bb
2088 || single_succ (virt_bb) != join)
2089 return;
2091 /* Third, let's see that the branching is done depending on the least
2092 significant bit of the pfn. */
2094 gimple branch = last_stmt (bb);
2095 if (!branch || gimple_code (branch) != GIMPLE_COND)
2096 return;
2098 if ((gimple_cond_code (branch) != NE_EXPR
2099 && gimple_cond_code (branch) != EQ_EXPR)
2100 || !integer_zerop (gimple_cond_rhs (branch)))
2101 return;
2103 tree cond = gimple_cond_lhs (branch);
2104 if (!ipa_is_ssa_with_stmt_def (cond))
2105 return;
2107 def = SSA_NAME_DEF_STMT (cond);
2108 if (!is_gimple_assign (def)
2109 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2110 || !integer_onep (gimple_assign_rhs2 (def)))
2111 return;
2113 cond = gimple_assign_rhs1 (def);
2114 if (!ipa_is_ssa_with_stmt_def (cond))
2115 return;
2117 def = SSA_NAME_DEF_STMT (cond);
2119 if (is_gimple_assign (def)
2120 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2122 cond = gimple_assign_rhs1 (def);
2123 if (!ipa_is_ssa_with_stmt_def (cond))
2124 return;
2125 def = SSA_NAME_DEF_STMT (cond);
2128 tree rec2;
2129 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2130 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2131 == ptrmemfunc_vbit_in_delta),
2132 NULL);
2133 if (rec != rec2)
2134 return;
2136 index = ipa_get_param_decl_index (info, rec);
2137 if (index >= 0
2138 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2140 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2141 if (cs->indirect_info->offset != offset)
2142 cs->indirect_info->outer_type = NULL;
2143 cs->indirect_info->offset = offset;
2144 cs->indirect_info->agg_contents = 1;
2145 cs->indirect_info->member_ptr = 1;
2148 return;
2151 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2152 object referenced in the expression is a formal parameter of the caller
2153 FBI->node (described by FBI->info), create a call note for the
2154 statement. */
2156 static void
2157 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2158 gimple call, tree target)
2160 tree obj = OBJ_TYPE_REF_OBJECT (target);
2161 int index;
2162 HOST_WIDE_INT anc_offset;
2164 if (!flag_devirtualize)
2165 return;
2167 if (TREE_CODE (obj) != SSA_NAME)
2168 return;
2170 struct ipa_node_params *info = fbi->info;
2171 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2173 struct ipa_jump_func jfunc;
2174 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2175 return;
2177 anc_offset = 0;
2178 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2179 gcc_assert (index >= 0);
2180 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2181 call, &jfunc))
2182 return;
2184 else
2186 struct ipa_jump_func jfunc;
2187 gimple stmt = SSA_NAME_DEF_STMT (obj);
2188 tree expr;
2190 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2191 if (!expr)
2192 return;
2193 index = ipa_get_param_decl_index (info,
2194 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2195 gcc_assert (index >= 0);
2196 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2197 call, &jfunc, anc_offset))
2198 return;
2201 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2202 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2203 ii->offset = anc_offset;
2204 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2205 ii->otr_type = obj_type_ref_class (target);
2206 ii->polymorphic = 1;
2209 /* Analyze a call statement CALL and determine whether and how it utilizes
2210 formal parameters of the caller FBI->node (described by FBI->info), which
2211 also holds intermediate information about each formal parameter. */
2213 static void
2214 ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
2216 tree target = gimple_call_fn (call);
2218 if (!target
2219 || (TREE_CODE (target) != SSA_NAME
2220 && !virtual_method_call_p (target)))
2221 return;
2223 /* If we previously turned the call into a direct call, there is
2224 no need to analyze it again. */
2225 struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
2226 if (cs && !cs->indirect_unknown_callee)
2227 return;
2228 if (TREE_CODE (target) == SSA_NAME)
2229 ipa_analyze_indirect_call_uses (fbi, call, target);
2230 else if (virtual_method_call_p (target))
2231 ipa_analyze_virtual_call_uses (fbi, call, target);
2235 /* Analyze the statement STMT with respect to formal parameters (described
2236 in FBI->info) of the caller given by FBI->node. Currently it only checks
2237 whether formal parameters are called. */
2239 static void
2240 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2242 if (is_gimple_call (stmt))
2243 ipa_analyze_call_uses (fbi, stmt);
2246 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2247 If OP is a parameter declaration, mark it as used in the info structure
2248 passed in DATA. */
2250 static bool
2251 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2253 struct ipa_node_params *info = (struct ipa_node_params *) data;
2255 op = get_base_address (op);
2256 if (op
2257 && TREE_CODE (op) == PARM_DECL)
2259 int index = ipa_get_param_decl_index (info, op);
2260 gcc_assert (index >= 0);
2261 ipa_set_param_used (info, index, true);
2264 return false;
2267 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2268 the findings in various structures of the associated ipa_node_params
2269 structure, such as parameter flags, notes etc. FBI holds various data about
2270 the function being analyzed. */
2272 static void
2273 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2275 gimple_stmt_iterator gsi;
2276 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2278 gimple stmt = gsi_stmt (gsi);
2280 if (is_gimple_debug (stmt))
2281 continue;
2283 ipa_analyze_stmt_uses (fbi, stmt);
2284 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2285 visit_ref_for_mod_analysis,
2286 visit_ref_for_mod_analysis,
2287 visit_ref_for_mod_analysis);
2289 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2290 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2291 visit_ref_for_mod_analysis,
2292 visit_ref_for_mod_analysis,
2293 visit_ref_for_mod_analysis);
2296 /* Calculate controlled uses of parameters of NODE. */
2298 static void
2299 ipa_analyze_controlled_uses (struct cgraph_node *node)
2301 struct ipa_node_params *info = IPA_NODE_REF (node);
2303 for (int i = 0; i < ipa_get_param_count (info); i++)
2305 tree parm = ipa_get_param (info, i);
2306 int controlled_uses = 0;
2308 /* For SSA regs see if parameter is used. For non-SSA we compute
2309 the flag during modification analysis. */
2310 if (is_gimple_reg (parm))
2312 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2313 parm);
2314 if (ddef && !has_zero_uses (ddef))
2316 imm_use_iterator imm_iter;
2317 use_operand_p use_p;
2319 ipa_set_param_used (info, i, true);
2320 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2321 if (!is_gimple_call (USE_STMT (use_p)))
2323 if (!is_gimple_debug (USE_STMT (use_p)))
2325 controlled_uses = IPA_UNDESCRIBED_USE;
2326 break;
2329 else
2330 controlled_uses++;
2332 else
2333 controlled_uses = 0;
2335 else
2336 controlled_uses = IPA_UNDESCRIBED_USE;
2337 ipa_set_controlled_uses (info, i, controlled_uses);
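
/* For illustration, a hypothetical function (DISPATCH is made up):

     static void wrap (void (*cb) (int))
     {
       cb (1);
       dispatch (cb);
     }

   Both uses of CB appear in call statements, so its controlled uses count
   is 2. If CB also escaped in any other way, for example by being stored
   into a global variable, the count would become IPA_UNDESCRIBED_USE.  */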
2341 /* Free stuff in BI. */
2343 static void
2344 free_ipa_bb_info (struct ipa_bb_info *bi)
2346 bi->cg_edges.release ();
2347 bi->param_aa_statuses.release ();
2350 /* Dominator walker driving the analysis. */
2352 class analysis_dom_walker : public dom_walker
2354 public:
2355 analysis_dom_walker (struct func_body_info *fbi)
2356 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2358 virtual void before_dom_children (basic_block);
2360 private:
2361 struct func_body_info *m_fbi;
2364 void
2365 analysis_dom_walker::before_dom_children (basic_block bb)
2367 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2368 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2371 /* Initialize the array describing properties of formal parameters
2372 of NODE, analyze their uses and compute jump functions associated
2373 with actual arguments of calls from within NODE. */
2375 void
2376 ipa_analyze_node (struct cgraph_node *node)
2378 struct func_body_info fbi;
2379 struct ipa_node_params *info;
2381 ipa_check_create_node_params ();
2382 ipa_check_create_edge_args ();
2383 info = IPA_NODE_REF (node);
2385 if (info->analysis_done)
2386 return;
2387 info->analysis_done = 1;
2389 if (ipa_func_spec_opts_forbid_analysis_p (node))
2391 for (int i = 0; i < ipa_get_param_count (info); i++)
2393 ipa_set_param_used (info, i, true);
2394 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2396 return;
2399 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2400 push_cfun (func);
2401 calculate_dominance_info (CDI_DOMINATORS);
2402 ipa_initialize_node_params (node);
2403 ipa_analyze_controlled_uses (node);
2405 fbi.node = node;
2406 fbi.info = IPA_NODE_REF (node);
2407 fbi.bb_infos = vNULL;
2408 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2409 fbi.param_count = ipa_get_param_count (info);
2410 fbi.aa_walked = 0;
2412 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2414 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2415 bi->cg_edges.safe_push (cs);
2418 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2420 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2421 bi->cg_edges.safe_push (cs);
2424 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2426 int i;
2427 struct ipa_bb_info *bi;
2428 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2429 free_ipa_bb_info (bi);
2430 fbi.bb_infos.release ();
2431 free_dominance_info (CDI_DOMINATORS);
2432 pop_cfun ();
2435 /* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF,
2436 attempt a type-based devirtualization. If successful, return the
2437 target function declaration, otherwise return NULL. */
2439 tree
2440 ipa_intraprocedural_devirtualization (gimple call)
2442 tree binfo, token, fndecl;
2443 struct ipa_jump_func jfunc;
2444 tree otr = gimple_call_fn (call);
2446 jfunc.type = IPA_JF_UNKNOWN;
2447 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
2448 call, obj_type_ref_class (otr));
2449 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2450 return NULL_TREE;
2451 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2452 if (!binfo)
2453 return NULL_TREE;
2454 token = OBJ_TYPE_REF_TOKEN (otr);
2455 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
2456 binfo);
2457 #ifdef ENABLE_CHECKING
2458 if (fndecl)
2459 gcc_assert (possible_polymorphic_call_target_p
2460 (otr, cgraph_get_node (fndecl)));
2461 #endif
2462 return fndecl;
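
/* A sketch of the situation this handles, assuming the front end left an
   OBJ_TYPE_REF in place (the types and names below are hypothetical):

     struct A { virtual int get (); };

     int use (void)
     {
       A a;
       return a.get ();   // an OBJ_TYPE_REF with &a as its object

   Here compute_known_type_jump_func determines the exact dynamic type of
   the object, the BINFO lookup maps the OBJ_TYPE_REF token to A::get, and
   the indirect call can be turned into a direct one.  */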
2465 /* Update the jump function DST when the call graph edge corresponding to SRC
2466 is being inlined, knowing that DST is of type ancestor and SRC of known
2467 type. */
2469 static void
2470 combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2471 struct ipa_jump_func *dst)
2473 HOST_WIDE_INT combined_offset;
2474 tree combined_type;
2476 if (!ipa_get_jf_ancestor_type_preserved (dst))
2478 dst->type = IPA_JF_UNKNOWN;
2479 return;
2482 combined_offset = ipa_get_jf_known_type_offset (src)
2483 + ipa_get_jf_ancestor_offset (dst);
2484 combined_type = ipa_get_jf_ancestor_type (dst);
2486 ipa_set_jf_known_type (dst, combined_offset,
2487 ipa_get_jf_known_type_base_type (src),
2488 combined_type);
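
/* A small worked example of the combination above: if SRC says the argument
   is a known object with base type B at offset 32 bits and DST is an
   ancestor jump function that adds another 64 bits and has component type
   A, the result is a known type jump function with offset 32 + 64 = 96,
   base type B and component type A.  */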
2491 /* Update the jump functions associated with call graph edge E when the call
2492 graph edge CS is being inlined, assuming that E->caller is already (possibly
2493 indirectly) inlined into CS->callee and that E has not been inlined. */
2495 static void
2496 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2497 struct cgraph_edge *e)
2499 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2500 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2501 int count = ipa_get_cs_argument_count (args);
2502 int i;
2504 for (i = 0; i < count; i++)
2506 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2508 if (dst->type == IPA_JF_ANCESTOR)
2510 struct ipa_jump_func *src;
2511 int dst_fid = dst->value.ancestor.formal_id;
2513 /* Variable number of arguments can cause havoc if we try to access
2514 one that does not exist in the inlined edge. So make sure we
2515 don't. */
2516 if (dst_fid >= ipa_get_cs_argument_count (top))
2518 dst->type = IPA_JF_UNKNOWN;
2519 continue;
2522 src = ipa_get_ith_jump_func (top, dst_fid);
2524 if (src->agg.items
2525 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2527 struct ipa_agg_jf_item *item;
2528 int j;
2530 /* Currently we do not produce clobber aggregate jump functions,
2531 replace with merging when we do. */
2532 gcc_assert (!dst->agg.items);
2534 dst->agg.items = vec_safe_copy (src->agg.items);
2535 dst->agg.by_ref = src->agg.by_ref;
2536 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2537 item->offset -= dst->value.ancestor.offset;
2540 if (src->type == IPA_JF_KNOWN_TYPE)
2541 combine_known_type_and_ancestor_jfs (src, dst);
2542 else if (src->type == IPA_JF_PASS_THROUGH
2543 && src->value.pass_through.operation == NOP_EXPR)
2545 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2546 dst->value.ancestor.agg_preserved &=
2547 src->value.pass_through.agg_preserved;
2548 dst->value.ancestor.type_preserved &=
2549 src->value.pass_through.type_preserved;
2551 else if (src->type == IPA_JF_ANCESTOR)
2553 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2554 dst->value.ancestor.offset += src->value.ancestor.offset;
2555 dst->value.ancestor.agg_preserved &=
2556 src->value.ancestor.agg_preserved;
2557 dst->value.ancestor.type_preserved &=
2558 src->value.ancestor.type_preserved;
2560 else
2561 dst->type = IPA_JF_UNKNOWN;
2563 else if (dst->type == IPA_JF_PASS_THROUGH)
2565 struct ipa_jump_func *src;
2566 /* We must check range due to calls with variable number of arguments
2567 and we cannot combine jump functions with operations. */
2568 if (dst->value.pass_through.operation == NOP_EXPR
2569 && (dst->value.pass_through.formal_id
2570 < ipa_get_cs_argument_count (top)))
2572 int dst_fid = dst->value.pass_through.formal_id;
2573 src = ipa_get_ith_jump_func (top, dst_fid);
2574 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2576 switch (src->type)
2578 case IPA_JF_UNKNOWN:
2579 dst->type = IPA_JF_UNKNOWN;
2580 break;
2581 case IPA_JF_KNOWN_TYPE:
2582 if (ipa_get_jf_pass_through_type_preserved (dst))
2583 ipa_set_jf_known_type (dst,
2584 ipa_get_jf_known_type_offset (src),
2585 ipa_get_jf_known_type_base_type (src),
2586 ipa_get_jf_known_type_component_type (src));
2587 else
2588 dst->type = IPA_JF_UNKNOWN;
2589 break;
2590 case IPA_JF_CONST:
2591 ipa_set_jf_cst_copy (dst, src);
2592 break;
2594 case IPA_JF_PASS_THROUGH:
2596 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2597 enum tree_code operation;
2598 operation = ipa_get_jf_pass_through_operation (src);
2600 if (operation == NOP_EXPR)
2602 bool agg_p, type_p;
2603 agg_p = dst_agg_p
2604 && ipa_get_jf_pass_through_agg_preserved (src);
2605 type_p = ipa_get_jf_pass_through_type_preserved (src)
2606 && ipa_get_jf_pass_through_type_preserved (dst);
2607 ipa_set_jf_simple_pass_through (dst, formal_id,
2608 agg_p, type_p);
2610 else
2612 tree operand = ipa_get_jf_pass_through_operand (src);
2613 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2614 operation);
2616 break;
2618 case IPA_JF_ANCESTOR:
2620 bool agg_p, type_p;
2621 agg_p = dst_agg_p
2622 && ipa_get_jf_ancestor_agg_preserved (src);
2623 type_p = ipa_get_jf_ancestor_type_preserved (src)
2624 && ipa_get_jf_pass_through_type_preserved (dst);
2625 ipa_set_ancestor_jf (dst,
2626 ipa_get_jf_ancestor_offset (src),
2627 ipa_get_jf_ancestor_type (src),
2628 ipa_get_jf_ancestor_formal_id (src),
2629 agg_p, type_p);
2630 break;
2632 default:
2633 gcc_unreachable ();
2636 if (src->agg.items
2637 && (dst_agg_p || !src->agg.by_ref))
2639 /* Currently we do not produce clobber aggregate jump
2640 functions, replace with merging when we do. */
2641 gcc_assert (!dst->agg.items);
2643 dst->agg.by_ref = src->agg.by_ref;
2644 dst->agg.items = vec_safe_copy (src->agg.items);
2647 else
2648 dst->type = IPA_JF_UNKNOWN;
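
/* To illustrate the pass-through case above with hypothetical edges:
   suppose DST passes argument 1 of the inlined call straight through,
   i.e. it is a simple pass-through with formal_id 1, and at the inlined
   call site CS that argument was itself a simple pass-through of the
   caller's parameter 0. The combination is again a simple pass-through,
   now with formal_id 0, with the aggregate and type preservation flags of
   SRC and DST ANDed together.  */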
2653 /* If TARGET is an addr_expr of a function declaration, make it the destination
2654 of an indirect edge IE and return the edge. Otherwise, return NULL. */
2656 struct cgraph_edge *
2657 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
2659 struct cgraph_node *callee;
2660 struct inline_edge_summary *es = inline_edge_summary (ie);
2661 bool unreachable = false;
2663 if (TREE_CODE (target) == ADDR_EXPR)
2664 target = TREE_OPERAND (target, 0);
2665 if (TREE_CODE (target) != FUNCTION_DECL)
2667 target = canonicalize_constructor_val (target, NULL);
2668 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2670 if (ie->indirect_info->member_ptr)
2671 /* Member pointer call that goes through a VMT lookup. */
2672 return NULL;
2674 if (dump_enabled_p ())
2676 location_t loc = gimple_location (ie->call_stmt);
2677 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2678 "discovered direct call to non-function in %s/%i, "
2679 "making it __builtin_unreachable\n",
2680 ie->caller->name (),
2681 ie->caller->order);
2683 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2684 callee = cgraph_get_create_node (target);
2685 unreachable = true;
2687 else
2688 callee = cgraph_get_node (target);
2690 else
2691 callee = cgraph_get_node (target);
2693 /* Because may-edges are not explicitly represented and the vtable may be
2694 external, we may create the first reference to the object in the unit. */
2695 if (!callee || callee->global.inlined_to)
2698 /* We had better make sure we can refer to it.
2699 In the case of static functions we are out of luck, since we already
2700 removed their bodies. In the case of public functions we may or may
2701 not introduce the reference. */
2702 if (!canonicalize_constructor_val (target, NULL)
2703 || !TREE_PUBLIC (target))
2705 if (dump_file)
2706 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2707 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2708 xstrdup (ie->caller->name ()),
2709 ie->caller->order,
2710 xstrdup (ie->callee->name ()),
2711 ie->callee->order);
2712 return NULL;
2714 callee = cgraph_get_create_node (target);
2717 if (!dbg_cnt (devirt))
2718 return NULL;
2720 ipa_check_create_node_params ();
2722 /* We cannot make edges to inline clones. It is a bug if someone removed
2723 the cgraph node too early. */
2724 gcc_assert (!callee->global.inlined_to);
2726 if (dump_file && !unreachable)
2728 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
2729 "(%s/%i -> %s/%i), for stmt ",
2730 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2731 xstrdup (ie->caller->name ()),
2732 ie->caller->order,
2733 xstrdup (callee->name ()),
2734 callee->order);
2735 if (ie->call_stmt)
2736 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2737 else
2738 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2740 if (dump_enabled_p ())
2742 location_t loc = gimple_location (ie->call_stmt);
2743 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2744 "converting indirect call in %s to direct call to %s\n",
2745 ie->caller->name (), callee->name ());
2747 ie = cgraph_make_edge_direct (ie, callee);
2748 es = inline_edge_summary (ie);
2749 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2750 - eni_size_weights.call_cost);
2751 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2752 - eni_time_weights.call_cost);
2754 return ie;
2757 /* Retrieve a value from aggregate jump function AGG for the given OFFSET, or
2758 return NULL if there is none. BY_REF specifies whether the value has to
2759 be passed by reference or by value. */
2761 tree
2762 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2763 HOST_WIDE_INT offset, bool by_ref)
2765 struct ipa_agg_jf_item *item;
2766 int i;
2768 if (by_ref != agg->by_ref)
2769 return NULL;
2771 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2772 if (item->offset == offset)
2774 /* Currently we do not have clobber values, return NULL for them once
2775 we do. */
2776 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2777 return item->value;
2779 return NULL;
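
/* For instance, if AGG describes an aggregate passed by value whose items
   are { offset 0: 16, offset 64: &foo } (FOO being some hypothetical
   function), then calling this with OFFSET 64 and BY_REF false returns the
   ADDR_EXPR of FOO, while any other offset, or a BY_REF mismatch, yields
   NULL.  */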
2782 /* Remove a reference to SYMBOL from the list of references of a node given by
2783 reference description RDESC. Return true if the reference has been
2784 successfully found and removed. */
2786 static bool
2787 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2789 struct ipa_ref *to_del;
2790 struct cgraph_edge *origin;
2792 origin = rdesc->cs;
2793 if (!origin)
2794 return false;
2795 to_del = ipa_find_reference (origin->caller, symbol,
2796 origin->call_stmt, origin->lto_stmt_uid);
2797 if (!to_del)
2798 return false;
2800 ipa_remove_reference (to_del);
2801 if (dump_file)
2802 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2803 xstrdup (origin->caller->name ()),
2804 origin->caller->order, xstrdup (symbol->name ()));
2805 return true;
2808 /* If JFUNC has a reference description with refcount different from
2809 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2810 NULL. JFUNC must be a constant jump function. */
2812 static struct ipa_cst_ref_desc *
2813 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2815 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2816 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2817 return rdesc;
2818 else
2819 return NULL;
2822 /* If the value of constant jump function JFUNC is an address of a function
2823 declaration, return the associated call graph node. Otherwise return
2824 NULL. */
2826 static cgraph_node *
2827 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2829 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2830 tree cst = ipa_get_jf_constant (jfunc);
2831 if (TREE_CODE (cst) != ADDR_EXPR
2832 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2833 return NULL;
2835 return cgraph_get_node (TREE_OPERAND (cst, 0));
2839 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2840 refcount and, if it hits zero, remove the reference to the symbol the jump
2841 function refers to from the caller of the edge specified in the rdesc.
2842 Return false if either the symbol or the reference could not be found, otherwise return true. */
2844 static bool
2845 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2847 struct ipa_cst_ref_desc *rdesc;
2848 if (jfunc->type == IPA_JF_CONST
2849 && (rdesc = jfunc_rdesc_usable (jfunc))
2850 && --rdesc->refcount == 0)
2852 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2853 if (!symbol)
2854 return false;
2856 return remove_described_reference (symbol, rdesc);
2858 return true;
2861 /* Try to find a destination for indirect edge IE that corresponds to a simple
2862 call or a call of a member function pointer and where the destination is a
2863 pointer formal parameter described by jump function JFUNC. If it can be
2864 determined, return the newly direct edge, otherwise return NULL.
2865 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2867 static struct cgraph_edge *
2868 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2869 struct ipa_jump_func *jfunc,
2870 struct ipa_node_params *new_root_info)
2872 struct cgraph_edge *cs;
2873 tree target;
2874 bool agg_contents = ie->indirect_info->agg_contents;
2876 if (ie->indirect_info->agg_contents)
2877 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2878 ie->indirect_info->offset,
2879 ie->indirect_info->by_ref);
2880 else
2881 target = ipa_value_from_jfunc (new_root_info, jfunc);
2882 if (!target)
2883 return NULL;
2884 cs = ipa_make_edge_direct_to_target (ie, target);
2886 if (cs && !agg_contents)
2888 bool ok;
2889 gcc_checking_assert (cs->callee
2890 && (cs != ie
2891 || jfunc->type != IPA_JF_CONST
2892 || !cgraph_node_for_jfunc (jfunc)
2893 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2894 ok = try_decrement_rdesc_refcount (jfunc);
2895 gcc_checking_assert (ok);
2898 return cs;
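
/* A hypothetical scenario for the simple case above (RUN, ENTRY and
   DO_WORK are made-up names):

     static void run (void (*cb) (void))
     {
       cb ();
     }

     void entry (void)
     {
       run (do_work);
     }

   When RUN is inlined into ENTRY, the jump function of its only argument
   is a constant ADDR_EXPR of DO_WORK, so ipa_value_from_jfunc returns that
   address and the indirect edge becomes a direct call to DO_WORK.  */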
2901 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2902 call based on a formal parameter which is described by jump function JFUNC
2903 and if it can be determined, make it direct and return the direct edge.
2904 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2905 are relative to. */
2907 static struct cgraph_edge *
2908 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2909 struct ipa_jump_func *jfunc,
2910 struct ipa_node_params *new_root_info)
2912 tree binfo, target;
2914 if (!flag_devirtualize)
2915 return NULL;
2917 /* First try to do lookup via known virtual table pointer value. */
2918 if (!ie->indirect_info->by_ref)
2920 tree vtable;
2921 unsigned HOST_WIDE_INT offset;
2922 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2923 ie->indirect_info->offset,
2924 true);
2925 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2927 target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2928 vtable, offset);
2929 if (target)
2931 if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
2932 && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
2933 || !possible_polymorphic_call_target_p
2934 (ie, cgraph_get_node (target)))
2936 if (dump_file)
2937 fprintf (dump_file,
2938 "Type inconsident devirtualization: %s/%i->%s\n",
2939 ie->caller->name (), ie->caller->order,
2940 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2941 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2942 cgraph_get_create_node (target);
2944 return ipa_make_edge_direct_to_target (ie, target);
2949 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2951 if (!binfo)
2952 return NULL;
2954 if (TREE_CODE (binfo) != TREE_BINFO)
2956 ipa_polymorphic_call_context context;
2957 vec <cgraph_node *> targets;
2958 bool final;
2960 if (!get_polymorphic_call_info_from_invariant
2961 (&context, binfo, ie->indirect_info->otr_type,
2962 ie->indirect_info->offset))
2963 return NULL;
2964 targets = possible_polymorphic_call_targets
2965 (ie->indirect_info->otr_type,
2966 ie->indirect_info->otr_token,
2967 context, &final);
2968 if (!final || targets.length () > 1)
2969 return NULL;
2970 if (targets.length () == 1)
2971 target = targets[0]->decl;
2972 else
2974 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2975 cgraph_get_create_node (target);
2978 else
2980 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
2981 ie->indirect_info->otr_type);
2982 if (binfo)
2983 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2984 binfo);
2985 else
2986 return NULL;
2989 if (target)
2991 #ifdef ENABLE_CHECKING
2992 gcc_assert (possible_polymorphic_call_target_p
2993 (ie, cgraph_get_node (target)));
2994 #endif
2995 return ipa_make_edge_direct_to_target (ie, target);
2997 else
2998 return NULL;
3001 /* Update the param called notes associated with NODE when CS is being inlined,
3002 assuming NODE is (potentially indirectly) inlined into CS->callee.
3003 Moreover, if the callee is discovered to be constant, create a new cgraph
3004 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3005 unless NEW_EDGES is NULL. Return true iff new edge(s) were created. */
3007 static bool
3008 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3009 struct cgraph_node *node,
3010 vec<cgraph_edge_p> *new_edges)
3012 struct ipa_edge_args *top;
3013 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3014 struct ipa_node_params *new_root_info;
3015 bool res = false;
3017 ipa_check_create_edge_args ();
3018 top = IPA_EDGE_REF (cs);
3019 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3020 ? cs->caller->global.inlined_to
3021 : cs->caller);
3023 for (ie = node->indirect_calls; ie; ie = next_ie)
3025 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3026 struct ipa_jump_func *jfunc;
3027 int param_index;
3029 next_ie = ie->next_callee;
3031 if (ici->param_index == -1)
3032 continue;
3034 /* We must check range due to calls with variable number of arguments: */
3035 if (ici->param_index >= ipa_get_cs_argument_count (top))
3037 ici->param_index = -1;
3038 continue;
3041 param_index = ici->param_index;
3042 jfunc = ipa_get_ith_jump_func (top, param_index);
3044 if (!flag_indirect_inlining)
3045 new_direct_edge = NULL;
3046 else if (ici->polymorphic)
3047 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
3048 new_root_info);
3049 else
3050 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3051 new_root_info);
3052 /* If speculation was removed, then we need to do nothing. */
3053 if (new_direct_edge && new_direct_edge != ie)
3055 new_direct_edge->indirect_inlining_edge = 1;
3056 top = IPA_EDGE_REF (cs);
3057 res = true;
3059 else if (new_direct_edge)
3061 new_direct_edge->indirect_inlining_edge = 1;
3062 if (new_direct_edge->call_stmt)
3063 new_direct_edge->call_stmt_cannot_inline_p
3064 = !gimple_check_call_matching_types (
3065 new_direct_edge->call_stmt,
3066 new_direct_edge->callee->decl, false);
3067 if (new_edges)
3069 new_edges->safe_push (new_direct_edge);
3070 res = true;
3072 top = IPA_EDGE_REF (cs);
3074 else if (jfunc->type == IPA_JF_PASS_THROUGH
3075 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3077 if ((ici->agg_contents
3078 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3079 || (ici->polymorphic
3080 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3081 ici->param_index = -1;
3082 else
3083 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3085 else if (jfunc->type == IPA_JF_ANCESTOR)
3087 if ((ici->agg_contents
3088 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3089 || (ici->polymorphic
3090 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3091 ici->param_index = -1;
3092 else
3094 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3095 if (ipa_get_jf_ancestor_offset (jfunc))
3096 ici->outer_type = NULL;
3097 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3100 else
3101 /* Either we can find a destination for this edge now or never. */
3102 ici->param_index = -1;
3105 return res;
3108 /* Recursively traverse subtree of NODE (including node) made of inlined
3109 cgraph_edges when CS has been inlined and invoke
3110 update_indirect_edges_after_inlining on all nodes and
3111 update_jump_functions_after_inlining on all non-inlined edges that lead out
3112 of this subtree. Newly discovered indirect edges will be added to
3113 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3114 created. */
3116 static bool
3117 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3118 struct cgraph_node *node,
3119 vec<cgraph_edge_p> *new_edges)
3121 struct cgraph_edge *e;
3122 bool res;
3124 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3126 for (e = node->callees; e; e = e->next_callee)
3127 if (!e->inline_failed)
3128 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3129 else
3130 update_jump_functions_after_inlining (cs, e);
3131 for (e = node->indirect_calls; e; e = e->next_callee)
3132 update_jump_functions_after_inlining (cs, e);
3134 return res;
3137 /* Combine two controlled uses counts as done during inlining. */
3139 static int
3140 combine_controlled_uses_counters (int c, int d)
3142 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3143 return IPA_UNDESCRIBED_USE;
3144 else
3145 return c + d - 1;
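
/* A worked example: if the new root has C = 3 described uses and the
   inlined callee had D = 2 controlled uses of the corresponding parameter,
   the combined count is 3 + 2 - 1 = 4, because the use that was the
   inlined call itself disappears and the callee's two uses take its place.
   IPA_UNDESCRIBED_USE on either side makes the result undescribed too.  */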
3148 /* Propagate number of controlled users from CS->callee to the new root of the
3149 tree of inlined nodes. */
3151 static void
3152 propagate_controlled_uses (struct cgraph_edge *cs)
3154 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3155 struct cgraph_node *new_root = cs->caller->global.inlined_to
3156 ? cs->caller->global.inlined_to : cs->caller;
3157 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3158 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3159 int count, i;
3161 count = MIN (ipa_get_cs_argument_count (args),
3162 ipa_get_param_count (old_root_info));
3163 for (i = 0; i < count; i++)
3165 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3166 struct ipa_cst_ref_desc *rdesc;
3168 if (jf->type == IPA_JF_PASS_THROUGH)
3170 int src_idx, c, d;
3171 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3172 c = ipa_get_controlled_uses (new_root_info, src_idx);
3173 d = ipa_get_controlled_uses (old_root_info, i);
3175 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3176 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3177 c = combine_controlled_uses_counters (c, d);
3178 ipa_set_controlled_uses (new_root_info, src_idx, c);
3179 if (c == 0 && new_root_info->ipcp_orig_node)
3181 struct cgraph_node *n;
3182 struct ipa_ref *ref;
3183 tree t = new_root_info->known_vals[src_idx];
3185 if (t && TREE_CODE (t) == ADDR_EXPR
3186 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3187 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
3188 && (ref = ipa_find_reference (new_root,
3189 n, NULL, 0)))
3191 if (dump_file)
3192 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3193 "reference from %s/%i to %s/%i.\n",
3194 xstrdup (new_root->name ()),
3195 new_root->order,
3196 xstrdup (n->name ()), n->order);
3197 ipa_remove_reference (ref);
3201 else if (jf->type == IPA_JF_CONST
3202 && (rdesc = jfunc_rdesc_usable (jf)))
3204 int d = ipa_get_controlled_uses (old_root_info, i);
3205 int c = rdesc->refcount;
3206 rdesc->refcount = combine_controlled_uses_counters (c, d);
3207 if (rdesc->refcount == 0)
3209 tree cst = ipa_get_jf_constant (jf);
3210 struct cgraph_node *n;
3211 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3212 && TREE_CODE (TREE_OPERAND (cst, 0))
3213 == FUNCTION_DECL);
3214 n = cgraph_get_node (TREE_OPERAND (cst, 0));
3215 if (n)
3217 struct cgraph_node *clone;
3218 bool ok;
3219 ok = remove_described_reference (n, rdesc);
3220 gcc_checking_assert (ok);
3222 clone = cs->caller;
3223 while (clone->global.inlined_to
3224 && clone != rdesc->cs->caller
3225 && IPA_NODE_REF (clone)->ipcp_orig_node)
3227 struct ipa_ref *ref;
3228 ref = ipa_find_reference (clone,
3229 n, NULL, 0);
3230 if (ref)
3232 if (dump_file)
3233 fprintf (dump_file, "ipa-prop: Removing "
3234 "cloning-created reference "
3235 "from %s/%i to %s/%i.\n",
3236 xstrdup (clone->name ()),
3237 clone->order,
3238 xstrdup (n->name ()),
3239 n->order);
3240 ipa_remove_reference (ref);
3242 clone = clone->callers->caller;
3249 for (i = ipa_get_param_count (old_root_info);
3250 i < ipa_get_cs_argument_count (args);
3251 i++)
3253 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3255 if (jf->type == IPA_JF_CONST)
3257 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3258 if (rdesc)
3259 rdesc->refcount = IPA_UNDESCRIBED_USE;
3261 else if (jf->type == IPA_JF_PASS_THROUGH)
3262 ipa_set_controlled_uses (new_root_info,
3263 jf->value.pass_through.formal_id,
3264 IPA_UNDESCRIBED_USE);
3268 /* Update jump functions and call note functions on inlining the call site CS.
3269 CS is expected to lead to a node already cloned by
3270 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3271 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edge(s) were
3272 created. */
3274 bool
3275 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3276 vec<cgraph_edge_p> *new_edges)
3278 bool changed;
3279 /* Do nothing if the preparation phase has not been carried out yet
3280 (i.e. during early inlining). */
3281 if (!ipa_node_params_vector.exists ())
3282 return false;
3283 gcc_assert (ipa_edge_args_vector);
3285 propagate_controlled_uses (cs);
3286 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3288 return changed;
3291 /* Frees all dynamically allocated structures that the argument info points
3292 to. */
3294 void
3295 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3297 vec_free (args->jump_functions);
3298 memset (args, 0, sizeof (*args));
3301 /* Free all ipa_edge structures. */
3303 void
3304 ipa_free_all_edge_args (void)
3306 int i;
3307 struct ipa_edge_args *args;
3309 if (!ipa_edge_args_vector)
3310 return;
3312 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3313 ipa_free_edge_args_substructures (args);
3315 vec_free (ipa_edge_args_vector);
3318 /* Frees all dynamically allocated structures that the param info points
3319 to. */
3321 void
3322 ipa_free_node_params_substructures (struct ipa_node_params *info)
3324 info->descriptors.release ();
3325 free (info->lattices);
3326 /* Lattice values and their sources are deallocated with their allocation
3327 pool. */
3328 info->known_vals.release ();
3329 memset (info, 0, sizeof (*info));
3332 /* Free all ipa_node_params structures. */
3334 void
3335 ipa_free_all_node_params (void)
3337 int i;
3338 struct ipa_node_params *info;
3340 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3341 ipa_free_node_params_substructures (info);
3343 ipa_node_params_vector.release ();
3346 /* Set the aggregate replacements of NODE to be AGGVALS. */
3348 void
3349 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3350 struct ipa_agg_replacement_value *aggvals)
3352 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3353 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
3355 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3358 /* Hook that is called by cgraph.c when an edge is removed. */
3360 static void
3361 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3363 struct ipa_edge_args *args;
3365 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3366 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3367 return;
3369 args = IPA_EDGE_REF (cs);
3370 if (args->jump_functions)
3372 struct ipa_jump_func *jf;
3373 int i;
3374 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3376 struct ipa_cst_ref_desc *rdesc;
3377 try_decrement_rdesc_refcount (jf);
3378 if (jf->type == IPA_JF_CONST
3379 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3380 && rdesc->cs == cs)
3381 rdesc->cs = NULL;
3385 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3388 /* Hook that is called by cgraph.c when a node is removed. */
3390 static void
3391 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3393 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3394 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3395 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3396 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3397 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3400 /* Hook that is called by cgraph.c when an edge is duplicated. */
3402 static void
3403 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3404 __attribute__((unused)) void *data)
3406 struct ipa_edge_args *old_args, *new_args;
3407 unsigned int i;
3409 ipa_check_create_edge_args ();
3411 old_args = IPA_EDGE_REF (src);
3412 new_args = IPA_EDGE_REF (dst);
3414 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3416 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3418 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3419 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3421 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3423 if (src_jf->type == IPA_JF_CONST)
3425 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3427 if (!src_rdesc)
3428 dst_jf->value.constant.rdesc = NULL;
3429 else if (src->caller == dst->caller)
3431 struct ipa_ref *ref;
3432 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3433 gcc_checking_assert (n);
3434 ref = ipa_find_reference (src->caller, n,
3435 src->call_stmt, src->lto_stmt_uid);
3436 gcc_checking_assert (ref);
3437 ipa_clone_ref (ref, dst->caller, ref->stmt);
3439 gcc_checking_assert (ipa_refdesc_pool);
3440 struct ipa_cst_ref_desc *dst_rdesc
3441 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3442 dst_rdesc->cs = dst;
3443 dst_rdesc->refcount = src_rdesc->refcount;
3444 dst_rdesc->next_duplicate = NULL;
3445 dst_jf->value.constant.rdesc = dst_rdesc;
3447 else if (src_rdesc->cs == src)
3449 struct ipa_cst_ref_desc *dst_rdesc;
3450 gcc_checking_assert (ipa_refdesc_pool);
3451 dst_rdesc
3452 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3453 dst_rdesc->cs = dst;
3454 dst_rdesc->refcount = src_rdesc->refcount;
3455 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3456 src_rdesc->next_duplicate = dst_rdesc;
3457 dst_jf->value.constant.rdesc = dst_rdesc;
3459 else
3461 struct ipa_cst_ref_desc *dst_rdesc;
3462 /* This can happen during inlining, when a JFUNC can refer to a
3463 reference taken in a function up in the tree of inline clones.
3464 We need to find the duplicate that refers to our tree of
3465 inline clones. */
3467 gcc_assert (dst->caller->global.inlined_to);
3468 for (dst_rdesc = src_rdesc->next_duplicate;
3469 dst_rdesc;
3470 dst_rdesc = dst_rdesc->next_duplicate)
3472 struct cgraph_node *top;
3473 top = dst_rdesc->cs->caller->global.inlined_to
3474 ? dst_rdesc->cs->caller->global.inlined_to
3475 : dst_rdesc->cs->caller;
3476 if (dst->caller->global.inlined_to == top)
3477 break;
3479 gcc_assert (dst_rdesc);
3480 dst_jf->value.constant.rdesc = dst_rdesc;
3486 /* Hook that is called by cgraph.c when a node is duplicated. */
3488 static void
3489 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3490 ATTRIBUTE_UNUSED void *data)
3492 struct ipa_node_params *old_info, *new_info;
3493 struct ipa_agg_replacement_value *old_av, *new_av;
3495 ipa_check_create_node_params ();
3496 old_info = IPA_NODE_REF (src);
3497 new_info = IPA_NODE_REF (dst);
3499 new_info->descriptors = old_info->descriptors.copy ();
3500 new_info->lattices = NULL;
3501 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3503 new_info->analysis_done = old_info->analysis_done;
3504 new_info->node_enqueued = old_info->node_enqueued;
3506 old_av = ipa_get_agg_replacements_for_node (src);
3507 if (!old_av)
3508 return;
3510 new_av = NULL;
3511 while (old_av)
3513 struct ipa_agg_replacement_value *v;
3515 v = ggc_alloc<ipa_agg_replacement_value> ();
3516 memcpy (v, old_av, sizeof (*v));
3517 v->next = new_av;
3518 new_av = v;
3519 old_av = old_av->next;
3521 ipa_set_node_agg_value_chain (dst, new_av);
3525 /* Analyze a newly added function in the callgraph. */
3527 static void
3528 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3530 if (cgraph_function_with_gimple_body_p (node))
3531 ipa_analyze_node (node);
3534 /* Register our cgraph hooks if they are not already there. */
3536 void
3537 ipa_register_cgraph_hooks (void)
3539 if (!edge_removal_hook_holder)
3540 edge_removal_hook_holder =
3541 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3542 if (!node_removal_hook_holder)
3543 node_removal_hook_holder =
3544 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3545 if (!edge_duplication_hook_holder)
3546 edge_duplication_hook_holder =
3547 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3548 if (!node_duplication_hook_holder)
3549 node_duplication_hook_holder =
3550 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
3551 function_insertion_hook_holder =
3552 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
3555 /* Unregister our cgraph hooks. */
3557 static void
3558 ipa_unregister_cgraph_hooks (void)
3560 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3561 edge_removal_hook_holder = NULL;
3562 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3563 node_removal_hook_holder = NULL;
3564 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3565 edge_duplication_hook_holder = NULL;
3566 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3567 node_duplication_hook_holder = NULL;
3568 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3569 function_insertion_hook_holder = NULL;
3572 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3573 longer needed after ipa-cp. */
3575 void
3576 ipa_free_all_structures_after_ipa_cp (void)
3578 if (!optimize)
3580 ipa_free_all_edge_args ();
3581 ipa_free_all_node_params ();
3582 free_alloc_pool (ipcp_sources_pool);
3583 free_alloc_pool (ipcp_values_pool);
3584 free_alloc_pool (ipcp_agg_lattice_pool);
3585 ipa_unregister_cgraph_hooks ();
3586 if (ipa_refdesc_pool)
3587 free_alloc_pool (ipa_refdesc_pool);
3591 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3592 longer needed after indirect inlining. */
3594 void
3595 ipa_free_all_structures_after_iinln (void)
3597 ipa_free_all_edge_args ();
3598 ipa_free_all_node_params ();
3599 ipa_unregister_cgraph_hooks ();
3600 if (ipcp_sources_pool)
3601 free_alloc_pool (ipcp_sources_pool);
3602 if (ipcp_values_pool)
3603 free_alloc_pool (ipcp_values_pool);
3604 if (ipcp_agg_lattice_pool)
3605 free_alloc_pool (ipcp_agg_lattice_pool);
3606 if (ipa_refdesc_pool)
3607 free_alloc_pool (ipa_refdesc_pool);
3610 /* Print the ipa_tree_map data structure (parameter descriptors) of
3611 function NODE to F. */
3613 void
3614 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3616 int i, count;
3617 struct ipa_node_params *info;
3619 if (!node->definition)
3620 return;
3621 info = IPA_NODE_REF (node);
3622 fprintf (f, " function %s/%i parameter descriptors:\n",
3623 node->name (), node->order);
3624 count = ipa_get_param_count (info);
3625 for (i = 0; i < count; i++)
3627 int c;
3629 fprintf (f, " ");
3630 ipa_dump_param (f, info, i);
3631 if (ipa_is_param_used (info, i))
3632 fprintf (f, " used");
3633 c = ipa_get_controlled_uses (info, i);
3634 if (c == IPA_UNDESCRIBED_USE)
3635 fprintf (f, " undescribed_use");
3636 else
3637 fprintf (f, " controlled_uses=%i", c);
3638 fprintf (f, "\n");
3642 /* Print ipa_tree_map data structures of all functions in the
3643 callgraph to F. */
3645 void
3646 ipa_print_all_params (FILE * f)
3648 struct cgraph_node *node;
3650 fprintf (f, "\nFunction parameters:\n");
3651 FOR_EACH_FUNCTION (node)
3652 ipa_print_node_params (f, node);
3655 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3657 vec<tree>
3658 ipa_get_vector_of_formal_parms (tree fndecl)
3660 vec<tree> args;
3661 int count;
3662 tree parm;
3664 gcc_assert (!flag_wpa);
3665 count = count_formal_params (fndecl);
3666 args.create (count);
3667 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3668 args.quick_push (parm);
3670 return args;
3673 /* Return a heap-allocated vector containing the types of formal parameters of
3674 function type FNTYPE. */
3676 vec<tree>
3677 ipa_get_vector_of_formal_parm_types (tree fntype)
3679 vec<tree> types;
3680 int count = 0;
3681 tree t;
3683 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3684 count++;
3686 types.create (count);
3687 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3688 types.quick_push (TREE_VALUE (t));
3690 return types;
3693 /* Modify the function declaration FNDECL and its type according to the plan in
3694 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3695 to reflect the actual parameters being modified, which are determined by the
3696 base_index field. */
3698 void
3699 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3701 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3702 tree orig_type = TREE_TYPE (fndecl);
3703 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3705 /* The following test is an ugly hack: some functions simply don't have any
3706 arguments in their type. This is probably a bug but well... */
3707 bool care_for_types = (old_arg_types != NULL_TREE);
3708 bool last_parm_void;
3709 vec<tree> otypes;
3710 if (care_for_types)
3712 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3713 == void_type_node);
3714 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3715 if (last_parm_void)
3716 gcc_assert (oparms.length () + 1 == otypes.length ());
3717 else
3718 gcc_assert (oparms.length () == otypes.length ());
3720 else
3722 last_parm_void = false;
3723 otypes.create (0);
3726 int len = adjustments.length ();
3727 tree *link = &DECL_ARGUMENTS (fndecl);
3728 tree new_arg_types = NULL;
3729 for (int i = 0; i < len; i++)
3731 struct ipa_parm_adjustment *adj;
3732 gcc_assert (link);
3734 adj = &adjustments[i];
3735 tree parm;
3736 if (adj->op == IPA_PARM_OP_NEW)
3737 parm = NULL;
3738 else
3739 parm = oparms[adj->base_index];
3740 adj->base = parm;
3742 if (adj->op == IPA_PARM_OP_COPY)
3744 if (care_for_types)
3745 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3746 new_arg_types);
3747 *link = parm;
3748 link = &DECL_CHAIN (parm);
3750 else if (adj->op != IPA_PARM_OP_REMOVE)
3752 tree new_parm;
3753 tree ptype;
3755 if (adj->by_ref)
3756 ptype = build_pointer_type (adj->type);
3757 else
3759 ptype = adj->type;
3760 if (is_gimple_reg_type (ptype))
3762 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3763 if (TYPE_ALIGN (ptype) < malign)
3764 ptype = build_aligned_type (ptype, malign);
3768 if (care_for_types)
3769 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3771 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3772 ptype);
3773 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3774 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3775 DECL_ARTIFICIAL (new_parm) = 1;
3776 DECL_ARG_TYPE (new_parm) = ptype;
3777 DECL_CONTEXT (new_parm) = fndecl;
3778 TREE_USED (new_parm) = 1;
3779 DECL_IGNORED_P (new_parm) = 1;
3780 layout_decl (new_parm, 0);
3782 if (adj->op == IPA_PARM_OP_NEW)
3783 adj->base = NULL;
3784 else
3785 adj->base = parm;
3786 adj->new_decl = new_parm;
3788 *link = new_parm;
3789 link = &DECL_CHAIN (new_parm);
3793 *link = NULL_TREE;
3795 tree new_reversed = NULL;
3796 if (care_for_types)
3798 new_reversed = nreverse (new_arg_types);
3799 if (last_parm_void)
3801 if (new_reversed)
3802 TREE_CHAIN (new_arg_types) = void_list_node;
3803 else
3804 new_reversed = void_list_node;
3808 /* Use build_distinct_type_copy to preserve as much as possible from the
3809 original type (debug info, attribute lists etc.).
3810 The exception is METHOD_TYPEs, which must have a THIS argument.
3811 When we are asked to remove it, we need to build a new FUNCTION_TYPE
3812 instead. */
3813 tree new_type = NULL;
3814 if (TREE_CODE (orig_type) != METHOD_TYPE
3815 || (adjustments[0].op == IPA_PARM_OP_COPY
3816 && adjustments[0].base_index == 0))
3818 new_type = build_distinct_type_copy (orig_type);
3819 TYPE_ARG_TYPES (new_type) = new_reversed;
3821 else
3823 new_type
3824 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3825 new_reversed));
3826 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3827 DECL_VINDEX (fndecl) = NULL_TREE;
3830 /* When the signature changes, we need to clear the builtin info. */
3831 if (DECL_BUILT_IN (fndecl))
3833 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3834 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3837 /* This is a new type, not a copy of an old type. Need to reassociate
3838 variants. We can handle everything except the main variant lazily. */
3839 tree t = TYPE_MAIN_VARIANT (orig_type);
3840 if (orig_type != t)
3842 TYPE_MAIN_VARIANT (new_type) = t;
3843 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3844 TYPE_NEXT_VARIANT (t) = new_type;
3846 else
3848 TYPE_MAIN_VARIANT (new_type) = new_type;
3849 TYPE_NEXT_VARIANT (new_type) = NULL;
3852 TREE_TYPE (fndecl) = new_type;
3853 DECL_VIRTUAL_P (fndecl) = 0;
3854 DECL_LANG_SPECIFIC (fndecl) = NULL;
3855 otypes.release ();
3856 oparms.release ();
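/* For illustration only (a hypothetical example, not taken from a testcase):
   given the original declaration

     int foo (struct S *p, int unused);

   an adjustment vector whose first entry reduces parameter 0 to the scalar
   int it points to (by_ref false, type int) and whose second entry is
   IPA_PARM_OP_REMOVE for parameter 1 makes the declaration above read

     int foo (int SYNTH.0);

   assuming a NULL arg_prefix, so the synthesized PARM_DECL gets the default
   "SYNTH" name prefix.  */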
3859 /* Modify actual arguments of the function call statement STMT as indicated
3860 in ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3861 Otherwise it must contain the corresponding call graph edge. */
3863 void
3864 ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3865 ipa_parm_adjustment_vec adjustments)
3867 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
3868 vec<tree> vargs;
3869 vec<tree, va_gc> **debug_args = NULL;
3870 gimple new_stmt;
3871 gimple_stmt_iterator gsi, prev_gsi;
3872 tree callee_decl;
3873 int i, len;
3875 len = adjustments.length ();
3876 vargs.create (len);
3877 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3878 ipa_remove_stmt_references (current_node, stmt);
3880 gsi = gsi_for_stmt (stmt);
3881 prev_gsi = gsi;
3882 gsi_prev (&prev_gsi);
3883 for (i = 0; i < len; i++)
3885 struct ipa_parm_adjustment *adj;
3887 adj = &adjustments[i];
3889 if (adj->op == IPA_PARM_OP_COPY)
3891 tree arg = gimple_call_arg (stmt, adj->base_index);
3893 vargs.quick_push (arg);
3895 else if (adj->op != IPA_PARM_OP_REMOVE)
3897 tree expr, base, off;
3898 location_t loc;
3899 unsigned int deref_align = 0;
3900 bool deref_base = false;
3902 /* We create a new parameter out of the value of the old one; we can
3903 do the following kinds of transformations:
3905 - A scalar passed by reference is converted to a scalar passed by
3906 value. (adj->by_ref is false and the type of the original
3907 actual argument is a pointer to a scalar).
3909 - A part of an aggregate is passed instead of the whole aggregate.
3910 The part can be passed either by value or by reference, this is
3911 determined by value of adj->by_ref. Moreover, the code below
3912 handles both situations when the original aggregate is passed by
3913 value (its type is not a pointer) and when it is passed by
3914 reference (it is a pointer to an aggregate).
3916 When the new argument is passed by reference (adj->by_ref is true)
3917 it must be a part of an aggregate and therefore we form it by
3918 simply taking the address of a reference inside the original
3919 aggregate. */
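/* For example (an illustrative sketch only): if the original call was

     bar (&s);

   and the adjustment selects a scalar member of s to be passed by value,
   the code below rewrites the argument to the equivalent of

     bar (s.member);

   whereas with adj->by_ref set it would instead form

     bar (&s.member);  */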
3921 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3922 base = gimple_call_arg (stmt, adj->base_index);
3923 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3924 : EXPR_LOCATION (base);
3926 if (TREE_CODE (base) != ADDR_EXPR
3927 && POINTER_TYPE_P (TREE_TYPE (base)))
3928 off = build_int_cst (adj->alias_ptr_type,
3929 adj->offset / BITS_PER_UNIT);
3930 else
3932 HOST_WIDE_INT base_offset;
3933 tree prev_base;
3934 bool addrof;
3936 if (TREE_CODE (base) == ADDR_EXPR)
3938 base = TREE_OPERAND (base, 0);
3939 addrof = true;
3941 else
3942 addrof = false;
3943 prev_base = base;
3944 base = get_addr_base_and_unit_offset (base, &base_offset);
3945 /* Aggregate arguments can have non-invariant addresses. */
3946 if (!base)
3948 base = build_fold_addr_expr (prev_base);
3949 off = build_int_cst (adj->alias_ptr_type,
3950 adj->offset / BITS_PER_UNIT);
3952 else if (TREE_CODE (base) == MEM_REF)
3954 if (!addrof)
3956 deref_base = true;
3957 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3959 off = build_int_cst (adj->alias_ptr_type,
3960 base_offset
3961 + adj->offset / BITS_PER_UNIT);
3962 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3963 off);
3964 base = TREE_OPERAND (base, 0);
3966 else
3968 off = build_int_cst (adj->alias_ptr_type,
3969 base_offset
3970 + adj->offset / BITS_PER_UNIT);
3971 base = build_fold_addr_expr (base);
3975 if (!adj->by_ref)
3977 tree type = adj->type;
3978 unsigned int align;
3979 unsigned HOST_WIDE_INT misalign;
3981 if (deref_base)
3983 align = deref_align;
3984 misalign = 0;
3986 else
3988 get_pointer_alignment_1 (base, &align, &misalign);
3989 if (TYPE_ALIGN (type) > align)
3990 align = TYPE_ALIGN (type);
3992 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3993 * BITS_PER_UNIT);
3994 misalign = misalign & (align - 1);
3995 if (misalign != 0)
3996 align = (misalign & -misalign);
3997 if (align < TYPE_ALIGN (type))
3998 type = build_aligned_type (type, align);
3999 base = force_gimple_operand_gsi (&gsi, base,
4000 true, NULL, true, GSI_SAME_STMT);
4001 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4002 /* If expr is not a valid gimple call argument, emit
4003 a load into a temporary. */
4004 if (is_gimple_reg_type (TREE_TYPE (expr)))
4006 gimple tem = gimple_build_assign (NULL_TREE, expr);
4007 if (gimple_in_ssa_p (cfun))
4009 gimple_set_vuse (tem, gimple_vuse (stmt));
4010 expr = make_ssa_name (TREE_TYPE (expr), tem);
4012 else
4013 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4014 gimple_assign_set_lhs (tem, expr);
4015 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4018 else
4020 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4021 expr = build_fold_addr_expr (expr);
4022 expr = force_gimple_operand_gsi (&gsi, expr,
4023 true, NULL, true, GSI_SAME_STMT);
4025 vargs.quick_push (expr);
4027 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4029 unsigned int ix;
4030 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4031 gimple def_temp;
4033 arg = gimple_call_arg (stmt, adj->base_index);
4034 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4036 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4037 continue;
4038 arg = fold_convert_loc (gimple_location (stmt),
4039 TREE_TYPE (origin), arg);
4041 if (debug_args == NULL)
4042 debug_args = decl_debug_args_insert (callee_decl);
4043 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4044 if (ddecl == origin)
4046 ddecl = (**debug_args)[ix + 1];
4047 break;
4049 if (ddecl == NULL)
4051 ddecl = make_node (DEBUG_EXPR_DECL);
4052 DECL_ARTIFICIAL (ddecl) = 1;
4053 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4054 DECL_MODE (ddecl) = DECL_MODE (origin);
4056 vec_safe_push (*debug_args, origin);
4057 vec_safe_push (*debug_args, ddecl);
4059 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4060 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4064 if (dump_file && (dump_flags & TDF_DETAILS))
4066 fprintf (dump_file, "replacing stmt:");
4067 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4070 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4071 vargs.release ();
4072 if (gimple_call_lhs (stmt))
4073 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4075 gimple_set_block (new_stmt, gimple_block (stmt));
4076 if (gimple_has_location (stmt))
4077 gimple_set_location (new_stmt, gimple_location (stmt));
4078 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4079 gimple_call_copy_flags (new_stmt, stmt);
4080 if (gimple_in_ssa_p (cfun))
4082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4083 if (gimple_vdef (stmt))
4085 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4086 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4090 if (dump_file && (dump_flags & TDF_DETAILS))
4092 fprintf (dump_file, "with stmt:");
4093 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4094 fprintf (dump_file, "\n");
4096 gsi_replace (&gsi, new_stmt, true);
4097 if (cs)
4098 cgraph_set_call_stmt (cs, new_stmt);
4101 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
4102 gsi_prev (&gsi);
4104 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4107 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4108 so. ADJUSTMENTS is a vector of adjustments. CONVERT
4109 specifies whether the function should care about type incompatibility between
4110 the current and new expressions. If it is false, the function will leave
4111 incompatibility issues to the caller. Return true iff the expression
4112 was modified. */
4114 bool
4115 ipa_modify_expr (tree *expr, bool convert,
4116 ipa_parm_adjustment_vec adjustments)
4118 struct ipa_parm_adjustment *cand
4119 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4120 if (!cand)
4121 return false;
4123 tree src;
4124 if (cand->by_ref)
4125 src = build_simple_mem_ref (cand->new_decl);
4126 else
4127 src = cand->new_decl;
4129 if (dump_file && (dump_flags & TDF_DETAILS))
4131 fprintf (dump_file, "About to replace expr ");
4132 print_generic_expr (dump_file, *expr, 0);
4133 fprintf (dump_file, " with ");
4134 print_generic_expr (dump_file, src, 0);
4135 fprintf (dump_file, "\n");
4138 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4140 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4141 *expr = vce;
4143 else
4144 *expr = src;
4145 return true;
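/* As an illustration (hypothetical, not from a dump): when parameter
   "struct S *p" has been reduced to a by-value scalar replacement new_decl,
   a use of the reduced piece in the body is rewritten to new_decl directly,
   or wrapped in a VIEW_CONVERT_EXPR to the expected type when CONVERT is
   set and the types do not match usefully; with cand->by_ref set the
   replacement is the dereference *new_decl instead.  */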
4148 /* If T is an SSA_NAME, return NULL if it is not a default def or
4149 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4150 the base variable is always returned, regardless if it is a default
4151 def. Return T if it is not an SSA_NAME. */
4153 static tree
4154 get_ssa_base_param (tree t, bool ignore_default_def)
4156 if (TREE_CODE (t) == SSA_NAME)
4158 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4159 return SSA_NAME_VAR (t);
4160 else
4161 return NULL_TREE;
4163 return t;
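/* E.g. (illustrative): for the default definition p_1(D) of PARM_DECL p
   this returns p; for a non-default name such as p_2 it returns NULL_TREE
   unless IGNORE_DEFAULT_DEF is true; and a non-SSA operand such as the
   decl p itself is returned unchanged.  */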
4166 /* Given an expression, return an adjustment entry specifying the
4167 transformation to be done on EXPR. If no suitable adjustment entry
4168 is found, return NULL.
4170 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4171 default def; otherwise bail on them.
4173 If CONVERT is non-NULL, this function will set *CONVERT if the
4174 expression provided is a component reference. ADJUSTMENTS is the
4175 adjustments vector. */
4177 ipa_parm_adjustment *
4178 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4179 ipa_parm_adjustment_vec adjustments,
4180 bool ignore_default_def)
4182 if (TREE_CODE (**expr) == BIT_FIELD_REF
4183 || TREE_CODE (**expr) == IMAGPART_EXPR
4184 || TREE_CODE (**expr) == REALPART_EXPR)
4186 *expr = &TREE_OPERAND (**expr, 0);
4187 if (convert)
4188 *convert = true;
4191 HOST_WIDE_INT offset, size, max_size;
4192 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4193 if (!base || size == -1 || max_size == -1)
4194 return NULL;
4196 if (TREE_CODE (base) == MEM_REF)
4198 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4199 base = TREE_OPERAND (base, 0);
4202 base = get_ssa_base_param (base, ignore_default_def);
4203 if (!base || TREE_CODE (base) != PARM_DECL)
4204 return NULL;
4206 struct ipa_parm_adjustment *cand = NULL;
4207 unsigned int len = adjustments.length ();
4208 for (unsigned i = 0; i < len; i++)
4210 struct ipa_parm_adjustment *adj = &adjustments[i];
4212 if (adj->base == base
4213 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4215 cand = adj;
4216 break;
4220 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4221 return NULL;
4222 return cand;
4225 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4227 static bool
4228 index_in_adjustments_multiple_times_p (int base_index,
4229 ipa_parm_adjustment_vec adjustments)
4231 int i, len = adjustments.length ();
4232 bool one = false;
4234 for (i = 0; i < len; i++)
4236 struct ipa_parm_adjustment *adj;
4237 adj = &adjustments[i];
4239 if (adj->base_index == base_index)
4241 if (one)
4242 return true;
4243 else
4244 one = true;
4247 return false;
4251 /* Return adjustments that should have the same effect on function parameters
4252 and call arguments as if they were first changed according to adjustments in
4253 INNER and then by adjustments in OUTER. */
4255 ipa_parm_adjustment_vec
4256 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4257 ipa_parm_adjustment_vec outer)
4259 int i, outlen = outer.length ();
4260 int inlen = inner.length ();
4261 int removals = 0;
4262 ipa_parm_adjustment_vec adjustments, tmp;
4264 tmp.create (inlen);
4265 for (i = 0; i < inlen; i++)
4267 struct ipa_parm_adjustment *n;
4268 n = &inner[i];
4270 if (n->op == IPA_PARM_OP_REMOVE)
4271 removals++;
4272 else
4274 /* FIXME: Handling of new arguments is not implemented yet. */
4275 gcc_assert (n->op != IPA_PARM_OP_NEW);
4276 tmp.quick_push (*n);
4280 adjustments.create (outlen + removals);
4281 for (i = 0; i < outlen; i++)
4283 struct ipa_parm_adjustment r;
4284 struct ipa_parm_adjustment *out = &outer[i];
4285 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4287 memset (&r, 0, sizeof (r));
4288 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4289 if (out->op == IPA_PARM_OP_REMOVE)
4291 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4293 r.op = IPA_PARM_OP_REMOVE;
4294 adjustments.quick_push (r);
4296 continue;
4298 else
4300 /* FIXME: Handling of new arguments is not implemented yet. */
4301 gcc_assert (out->op != IPA_PARM_OP_NEW);
4304 r.base_index = in->base_index;
4305 r.type = out->type;
4307 /* FIXME: Create nonlocal value too. */
4309 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4310 r.op = IPA_PARM_OP_COPY;
4311 else if (in->op == IPA_PARM_OP_COPY)
4312 r.offset = out->offset;
4313 else if (out->op == IPA_PARM_OP_COPY)
4314 r.offset = in->offset;
4315 else
4316 r.offset = in->offset + out->offset;
4317 adjustments.quick_push (r);
4320 for (i = 0; i < inlen; i++)
4322 struct ipa_parm_adjustment *n = &inner[i];
4324 if (n->op == IPA_PARM_OP_REMOVE)
4325 adjustments.quick_push (*n);
4328 tmp.release ();
4329 return adjustments;
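/* A worked example (illustrative values only): with original parameters
   (a, b, c), INNER = { {base_index=0, COPY}, {base_index=1, REMOVE},
   {base_index=2, COPY} } leaves (a, c).  If OUTER = { {base_index=1, COPY} }
   then keeps only the second survivor, the combined vector returned above is
   { {base_index=2, COPY}, {base_index=1, REMOVE} }, i.e. only c remains,
   expressed in terms of the original indices.  */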
4332 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4333 way, assuming they are meant to be applied to FNDECL. */
4335 void
4336 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4337 tree fndecl)
4339 int i, len = adjustments.length ();
4340 bool first = true;
4341 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4343 fprintf (file, "IPA param adjustments: ");
4344 for (i = 0; i < len; i++)
4346 struct ipa_parm_adjustment *adj;
4347 adj = &adjustments[i];
4349 if (!first)
4350 fprintf (file, " ");
4351 else
4352 first = false;
4354 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4355 print_generic_expr (file, parms[adj->base_index], 0);
4356 if (adj->base)
4358 fprintf (file, ", base: ");
4359 print_generic_expr (file, adj->base, 0);
4361 if (adj->new_decl)
4363 fprintf (file, ", new_decl: ");
4364 print_generic_expr (file, adj->new_decl, 0);
4366 if (adj->new_ssa_base)
4368 fprintf (file, ", new_ssa_base: ");
4369 print_generic_expr (file, adj->new_ssa_base, 0);
4372 if (adj->op == IPA_PARM_OP_COPY)
4373 fprintf (file, ", copy_param");
4374 else if (adj->op == IPA_PARM_OP_REMOVE)
4375 fprintf (file, ", remove_param");
4376 else
4377 fprintf (file, ", offset %li", (long) adj->offset);
4378 if (adj->by_ref)
4379 fprintf (file, ", by_ref");
4380 print_node_brief (file, ", type: ", adj->type, 0);
4381 fprintf (file, "\n");
4383 parms.release ();
4386 /* Dump the linked list of aggregate replacement values AV to F. */
4388 void
4389 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4391 bool comma = false;
4392 fprintf (f, " Aggregate replacements:");
4393 for (; av; av = av->next)
4395 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4396 av->index, av->offset);
4397 print_generic_expr (f, av->value, 0);
4398 comma = true;
4400 fprintf (f, "\n");
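/* Sample output (hypothetical values):

     Aggregate replacements: 0[32]=4, 1[0]=3.14

   meaning parameter 0 holds the constant 4 at bit offset 32 and parameter 1
   the constant 3.14 at offset 0.  */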
4403 /* Stream out jump function JUMP_FUNC to OB. */
4405 static void
4406 ipa_write_jump_function (struct output_block *ob,
4407 struct ipa_jump_func *jump_func)
4409 struct ipa_agg_jf_item *item;
4410 struct bitpack_d bp;
4411 int i, count;
4413 streamer_write_uhwi (ob, jump_func->type);
4414 switch (jump_func->type)
4416 case IPA_JF_UNKNOWN:
4417 break;
4418 case IPA_JF_KNOWN_TYPE:
4419 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
4420 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
4421 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
4422 break;
4423 case IPA_JF_CONST:
4424 gcc_assert (
4425 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4426 stream_write_tree (ob, jump_func->value.constant.value, true);
4427 break;
4428 case IPA_JF_PASS_THROUGH:
4429 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4430 if (jump_func->value.pass_through.operation == NOP_EXPR)
4432 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4433 bp = bitpack_create (ob->main_stream);
4434 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4435 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4436 streamer_write_bitpack (&bp);
4438 else
4440 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4441 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4443 break;
4444 case IPA_JF_ANCESTOR:
4445 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4446 stream_write_tree (ob, jump_func->value.ancestor.type, true);
4447 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4448 bp = bitpack_create (ob->main_stream);
4449 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4450 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
4451 streamer_write_bitpack (&bp);
4452 break;
4455 count = vec_safe_length (jump_func->agg.items);
4456 streamer_write_uhwi (ob, count);
4457 if (count)
4459 bp = bitpack_create (ob->main_stream);
4460 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4461 streamer_write_bitpack (&bp);
4464 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4466 streamer_write_uhwi (ob, item->offset);
4467 stream_write_tree (ob, item->value, true);
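/* Note (descriptive only): the layout produced above -- a type tag, the
   per-type payload, the aggregate item count, an optional by_ref bitpack
   and finally the items themselves -- must be consumed in exactly the same
   order by ipa_read_jump_function below.  */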
4471 /* Read in jump function JUMP_FUNC from IB. */
4473 static void
4474 ipa_read_jump_function (struct lto_input_block *ib,
4475 struct ipa_jump_func *jump_func,
4476 struct cgraph_edge *cs,
4477 struct data_in *data_in)
4479 enum jump_func_type jftype;
4480 enum tree_code operation;
4481 int i, count;
4483 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4484 switch (jftype)
4486 case IPA_JF_UNKNOWN:
4487 jump_func->type = IPA_JF_UNKNOWN;
4488 break;
4489 case IPA_JF_KNOWN_TYPE:
4491 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4492 tree base_type = stream_read_tree (ib, data_in);
4493 tree component_type = stream_read_tree (ib, data_in);
4495 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4496 break;
4498 case IPA_JF_CONST:
4499 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4500 break;
4501 case IPA_JF_PASS_THROUGH:
4502 operation = (enum tree_code) streamer_read_uhwi (ib);
4503 if (operation == NOP_EXPR)
4505 int formal_id = streamer_read_uhwi (ib);
4506 struct bitpack_d bp = streamer_read_bitpack (ib);
4507 bool agg_preserved = bp_unpack_value (&bp, 1);
4508 bool type_preserved = bp_unpack_value (&bp, 1);
4509 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4510 type_preserved);
4512 else
4514 tree operand = stream_read_tree (ib, data_in);
4515 int formal_id = streamer_read_uhwi (ib);
4516 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4517 operation);
4519 break;
4520 case IPA_JF_ANCESTOR:
4522 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4523 tree type = stream_read_tree (ib, data_in);
4524 int formal_id = streamer_read_uhwi (ib);
4525 struct bitpack_d bp = streamer_read_bitpack (ib);
4526 bool agg_preserved = bp_unpack_value (&bp, 1);
4527 bool type_preserved = bp_unpack_value (&bp, 1);
4529 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4530 type_preserved);
4531 break;
4535 count = streamer_read_uhwi (ib);
4536 vec_alloc (jump_func->agg.items, count);
4537 if (count)
4539 struct bitpack_d bp = streamer_read_bitpack (ib);
4540 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4542 for (i = 0; i < count; i++)
4544 struct ipa_agg_jf_item item;
4545 item.offset = streamer_read_uhwi (ib);
4546 item.value = stream_read_tree (ib, data_in);
4547 jump_func->agg.items->quick_push (item);
4551 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4552 relevant to indirect inlining to OB. */
4554 static void
4555 ipa_write_indirect_edge_info (struct output_block *ob,
4556 struct cgraph_edge *cs)
4558 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4559 struct bitpack_d bp;
4561 streamer_write_hwi (ob, ii->param_index);
4562 streamer_write_hwi (ob, ii->offset);
4563 bp = bitpack_create (ob->main_stream);
4564 bp_pack_value (&bp, ii->polymorphic, 1);
4565 bp_pack_value (&bp, ii->agg_contents, 1);
4566 bp_pack_value (&bp, ii->member_ptr, 1);
4567 bp_pack_value (&bp, ii->by_ref, 1);
4568 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4569 bp_pack_value (&bp, ii->maybe_derived_type, 1);
4570 streamer_write_bitpack (&bp);
4572 if (ii->polymorphic)
4574 streamer_write_hwi (ob, ii->otr_token);
4575 stream_write_tree (ob, ii->otr_type, true);
4576 stream_write_tree (ob, ii->outer_type, true);
4580 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4581 relevant to indirect inlining from IB. */
4583 static void
4584 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4585 struct data_in *data_in ATTRIBUTE_UNUSED,
4586 struct cgraph_edge *cs)
4588 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4589 struct bitpack_d bp;
4591 ii->param_index = (int) streamer_read_hwi (ib);
4592 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4593 bp = streamer_read_bitpack (ib);
4594 ii->polymorphic = bp_unpack_value (&bp, 1);
4595 ii->agg_contents = bp_unpack_value (&bp, 1);
4596 ii->member_ptr = bp_unpack_value (&bp, 1);
4597 ii->by_ref = bp_unpack_value (&bp, 1);
4598 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4599 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
4600 if (ii->polymorphic)
4602 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4603 ii->otr_type = stream_read_tree (ib, data_in);
4604 ii->outer_type = stream_read_tree (ib, data_in);
4608 /* Stream out NODE info to OB. */
4610 static void
4611 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4613 int node_ref;
4614 lto_symtab_encoder_t encoder;
4615 struct ipa_node_params *info = IPA_NODE_REF (node);
4616 int j;
4617 struct cgraph_edge *e;
4618 struct bitpack_d bp;
4620 encoder = ob->decl_state->symtab_node_encoder;
4621 node_ref = lto_symtab_encoder_encode (encoder, node);
4622 streamer_write_uhwi (ob, node_ref);
4624 streamer_write_uhwi (ob, ipa_get_param_count (info));
4625 for (j = 0; j < ipa_get_param_count (info); j++)
4626 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4627 bp = bitpack_create (ob->main_stream);
4628 gcc_assert (info->analysis_done
4629 || ipa_get_param_count (info) == 0);
4630 gcc_assert (!info->node_enqueued);
4631 gcc_assert (!info->ipcp_orig_node);
4632 for (j = 0; j < ipa_get_param_count (info); j++)
4633 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4634 streamer_write_bitpack (&bp);
4635 for (j = 0; j < ipa_get_param_count (info); j++)
4636 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4637 for (e = node->callees; e; e = e->next_callee)
4639 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4641 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4642 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4643 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4645 for (e = node->indirect_calls; e; e = e->next_callee)
4647 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4649 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
4650 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4651 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4652 ipa_write_indirect_edge_info (ob, e);
4656 /* Stream in NODE info from IB. */
4658 static void
4659 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4660 struct data_in *data_in)
4662 struct ipa_node_params *info = IPA_NODE_REF (node);
4663 int k;
4664 struct cgraph_edge *e;
4665 struct bitpack_d bp;
4667 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4669 for (k = 0; k < ipa_get_param_count (info); k++)
4670 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4672 bp = streamer_read_bitpack (ib);
4673 if (ipa_get_param_count (info) != 0)
4674 info->analysis_done = true;
4675 info->node_enqueued = false;
4676 for (k = 0; k < ipa_get_param_count (info); k++)
4677 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4678 for (k = 0; k < ipa_get_param_count (info); k++)
4679 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4680 for (e = node->callees; e; e = e->next_callee)
4682 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4683 int count = streamer_read_uhwi (ib);
4685 if (!count)
4686 continue;
4687 vec_safe_grow_cleared (args->jump_functions, count);
4689 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4690 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4691 data_in);
4693 for (e = node->indirect_calls; e; e = e->next_callee)
4695 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4696 int count = streamer_read_uhwi (ib);
4698 if (count)
4700 vec_safe_grow_cleared (args->jump_functions, count);
4701 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4702 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4703 data_in);
4705 ipa_read_indirect_edge_info (ib, data_in, e);
4709 /* Write jump functions for all functions in the current partition. */
4711 void
4712 ipa_prop_write_jump_functions (void)
4714 struct cgraph_node *node;
4715 struct output_block *ob;
4716 unsigned int count = 0;
4717 lto_symtab_encoder_iterator lsei;
4718 lto_symtab_encoder_t encoder;
4721 if (!ipa_node_params_vector.exists ())
4722 return;
4724 ob = create_output_block (LTO_section_jump_functions);
4725 encoder = ob->decl_state->symtab_node_encoder;
4726 ob->cgraph_node = NULL;
4727 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4728 lsei_next_function_in_partition (&lsei))
4730 node = lsei_cgraph_node (lsei);
4731 if (cgraph_function_with_gimple_body_p (node)
4732 && IPA_NODE_REF (node) != NULL)
4733 count++;
4736 streamer_write_uhwi (ob, count);
4738 /* Process all of the functions. */
4739 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4740 lsei_next_function_in_partition (&lsei))
4742 node = lsei_cgraph_node (lsei);
4743 if (cgraph_function_with_gimple_body_p (node)
4744 && IPA_NODE_REF (node) != NULL)
4745 ipa_write_node_info (ob, node);
4747 streamer_write_char_stream (ob->main_stream, 0);
4748 produce_asm (ob, NULL);
4749 destroy_output_block (ob);
4752 /* Read section in file FILE_DATA of length LEN with data DATA. */
4754 static void
4755 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4756 size_t len)
4758 const struct lto_function_header *header =
4759 (const struct lto_function_header *) data;
4760 const int cfg_offset = sizeof (struct lto_function_header);
4761 const int main_offset = cfg_offset + header->cfg_size;
4762 const int string_offset = main_offset + header->main_size;
4763 struct data_in *data_in;
4764 struct lto_input_block ib_main;
4765 unsigned int i;
4766 unsigned int count;
4768 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4769 header->main_size);
4771 data_in =
4772 lto_data_in_create (file_data, (const char *) data + string_offset,
4773 header->string_size, vNULL);
4774 count = streamer_read_uhwi (&ib_main);
4776 for (i = 0; i < count; i++)
4778 unsigned int index;
4779 struct cgraph_node *node;
4780 lto_symtab_encoder_t encoder;
4782 index = streamer_read_uhwi (&ib_main);
4783 encoder = file_data->symtab_node_encoder;
4784 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4785 gcc_assert (node->definition);
4786 ipa_read_node_info (&ib_main, node, data_in);
4788 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4789 len);
4790 lto_data_in_delete (data_in);
4793 /* Read ipcp jump functions. */
4795 void
4796 ipa_prop_read_jump_functions (void)
4798 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4799 struct lto_file_decl_data *file_data;
4800 unsigned int j = 0;
4802 ipa_check_create_node_params ();
4803 ipa_check_create_edge_args ();
4804 ipa_register_cgraph_hooks ();
4806 while ((file_data = file_data_vec[j++]))
4808 size_t len;
4809 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4811 if (data)
4812 ipa_prop_read_section (file_data, data, len);
4816 /* After merging units, we can get a mismatch in argument counts.
4817 Decl merging might also have rendered parameter lists obsolete.
4818 Also compute called_with_variable_arg info. */
4820 void
4821 ipa_update_after_lto_read (void)
4823 ipa_check_create_node_params ();
4824 ipa_check_create_edge_args ();
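/* Stream out the aggregate value replacement chain for NODE to OB.  */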
4827 void
4828 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4830 int node_ref;
4831 unsigned int count = 0;
4832 lto_symtab_encoder_t encoder;
4833 struct ipa_agg_replacement_value *aggvals, *av;
4835 aggvals = ipa_get_agg_replacements_for_node (node);
4836 encoder = ob->decl_state->symtab_node_encoder;
4837 node_ref = lto_symtab_encoder_encode (encoder, node);
4838 streamer_write_uhwi (ob, node_ref);
4840 for (av = aggvals; av; av = av->next)
4841 count++;
4842 streamer_write_uhwi (ob, count);
4844 for (av = aggvals; av; av = av->next)
4846 struct bitpack_d bp;
4848 streamer_write_uhwi (ob, av->offset);
4849 streamer_write_uhwi (ob, av->index);
4850 stream_write_tree (ob, av->value, true);
4852 bp = bitpack_create (ob->main_stream);
4853 bp_pack_value (&bp, av->by_ref, 1);
4854 streamer_write_bitpack (&bp);
4858 /* Stream in the aggregate value replacement chain for NODE from IB. */
4860 static void
4861 read_agg_replacement_chain (struct lto_input_block *ib,
4862 struct cgraph_node *node,
4863 struct data_in *data_in)
4865 struct ipa_agg_replacement_value *aggvals = NULL;
4866 unsigned int count, i;
4868 count = streamer_read_uhwi (ib);
4869 for (i = 0; i < count; i++)
4871 struct ipa_agg_replacement_value *av;
4872 struct bitpack_d bp;
4874 av = ggc_alloc<ipa_agg_replacement_value> ();
4875 av->offset = streamer_read_uhwi (ib);
4876 av->index = streamer_read_uhwi (ib);
4877 av->value = stream_read_tree (ib, data_in);
4878 bp = streamer_read_bitpack (ib);
4879 av->by_ref = bp_unpack_value (&bp, 1);
4880 av->next = aggvals;
4881 aggvals = av;
4883 ipa_set_node_agg_value_chain (node, aggvals);
4886 /* Write all aggregate replacement chains for nodes in the current partition. */
4888 void
4889 ipa_prop_write_all_agg_replacement (void)
4891 struct cgraph_node *node;
4892 struct output_block *ob;
4893 unsigned int count = 0;
4894 lto_symtab_encoder_iterator lsei;
4895 lto_symtab_encoder_t encoder;
4897 if (!ipa_node_agg_replacements)
4898 return;
4900 ob = create_output_block (LTO_section_ipcp_transform);
4901 encoder = ob->decl_state->symtab_node_encoder;
4902 ob->cgraph_node = NULL;
4903 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4904 lsei_next_function_in_partition (&lsei))
4906 node = lsei_cgraph_node (lsei);
4907 if (cgraph_function_with_gimple_body_p (node)
4908 && ipa_get_agg_replacements_for_node (node) != NULL)
4909 count++;
4912 streamer_write_uhwi (ob, count);
4914 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4915 lsei_next_function_in_partition (&lsei))
4917 node = lsei_cgraph_node (lsei);
4918 if (cgraph_function_with_gimple_body_p (node)
4919 && ipa_get_agg_replacements_for_node (node) != NULL)
4920 write_agg_replacement_chain (ob, node);
4922 streamer_write_char_stream (ob->main_stream, 0);
4923 produce_asm (ob, NULL);
4924 destroy_output_block (ob);
4927 /* Read replacements section in file FILE_DATA of length LEN with data
4928 DATA. */
4930 static void
4931 read_replacements_section (struct lto_file_decl_data *file_data,
4932 const char *data,
4933 size_t len)
4935 const struct lto_function_header *header =
4936 (const struct lto_function_header *) data;
4937 const int cfg_offset = sizeof (struct lto_function_header);
4938 const int main_offset = cfg_offset + header->cfg_size;
4939 const int string_offset = main_offset + header->main_size;
4940 struct data_in *data_in;
4941 struct lto_input_block ib_main;
4942 unsigned int i;
4943 unsigned int count;
4945 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4946 header->main_size);
4948 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4949 header->string_size, vNULL);
4950 count = streamer_read_uhwi (&ib_main);
4952 for (i = 0; i < count; i++)
4954 unsigned int index;
4955 struct cgraph_node *node;
4956 lto_symtab_encoder_t encoder;
4958 index = streamer_read_uhwi (&ib_main);
4959 encoder = file_data->symtab_node_encoder;
4960 node = cgraph (lto_symtab_encoder_deref (encoder, index));
4961 gcc_assert (node->definition);
4962 read_agg_replacement_chain (&ib_main, node, data_in);
4964 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4965 len);
4966 lto_data_in_delete (data_in);
4969 /* Read IPA-CP aggregate replacements. */
4971 void
4972 ipa_prop_read_all_agg_replacement (void)
4974 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4975 struct lto_file_decl_data *file_data;
4976 unsigned int j = 0;
4978 while ((file_data = file_data_vec[j++]))
4980 size_t len;
4981 const char *data = lto_get_section_data (file_data,
4982 LTO_section_ipcp_transform,
4983 NULL, &len);
4984 if (data)
4985 read_replacements_section (file_data, data, len);
4989 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4990 NODE. */
4992 static void
4993 adjust_agg_replacement_values (struct cgraph_node *node,
4994 struct ipa_agg_replacement_value *aggval)
4996 struct ipa_agg_replacement_value *v;
4997 int i, c = 0, d = 0, *adj;
4999 if (!node->clone.combined_args_to_skip)
5000 return;
5002 for (v = aggval; v; v = v->next)
5004 gcc_assert (v->index >= 0);
5005 if (c < v->index)
5006 c = v->index;
5008 c++;
5010 adj = XALLOCAVEC (int, c);
5011 for (i = 0; i < c; i++)
5012 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5014 adj[i] = -1;
5015 d++;
5017 else
5018 adj[i] = i - d;
5020 for (v = aggval; v; v = v->next)
5021 v->index = adj[v->index];
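/* For instance (illustrative values): with combined_args_to_skip = {1} and
   replacements for original indices 0 and 2, the remapping array becomes
   adj = {0, -1, 1}, so the replacement for index 2 is renumbered to 1 in
   the clone.  */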
5024 /* Dominator walker driving the ipcp modification phase. */
5026 class ipcp_modif_dom_walker : public dom_walker
5028 public:
5029 ipcp_modif_dom_walker (struct func_body_info *fbi,
5030 vec<ipa_param_descriptor> descs,
5031 struct ipa_agg_replacement_value *av,
5032 bool *sc, bool *cc)
5033 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5034 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5036 virtual void before_dom_children (basic_block);
5038 private:
5039 struct func_body_info *m_fbi;
5040 vec<ipa_param_descriptor> m_descriptors;
5041 struct ipa_agg_replacement_value *m_aggval;
5042 bool *m_something_changed, *m_cfg_changed;
5045 void
5046 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5048 gimple_stmt_iterator gsi;
5049 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5051 struct ipa_agg_replacement_value *v;
5052 gimple stmt = gsi_stmt (gsi);
5053 tree rhs, val, t;
5054 HOST_WIDE_INT offset, size;
5055 int index;
5056 bool by_ref, vce;
5058 if (!gimple_assign_load_p (stmt))
5059 continue;
5060 rhs = gimple_assign_rhs1 (stmt);
5061 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5062 continue;
5064 vce = false;
5065 t = rhs;
5066 while (handled_component_p (t))
5068 /* V_C_E can do things like convert an array of integers to one
5069 bigger integer and similar things that we do not handle below. */
5070 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5072 vce = true;
5073 break;
5075 t = TREE_OPERAND (t, 0);
5077 if (vce)
5078 continue;
5080 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5081 &offset, &size, &by_ref))
5082 continue;
5083 for (v = m_aggval; v; v = v->next)
5084 if (v->index == index
5085 && v->offset == offset)
5086 break;
5087 if (!v
5088 || v->by_ref != by_ref
5089 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5090 continue;
5092 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5093 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5095 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5096 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5097 else if (TYPE_SIZE (TREE_TYPE (rhs))
5098 == TYPE_SIZE (TREE_TYPE (v->value)))
5099 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5100 else
5102 if (dump_file)
5104 fprintf (dump_file, " const ");
5105 print_generic_expr (dump_file, v->value, 0);
5106 fprintf (dump_file, " can't be converted to type of ");
5107 print_generic_expr (dump_file, rhs, 0);
5108 fprintf (dump_file, "\n");
5110 continue;
5113 else
5114 val = v->value;
5116 if (dump_file && (dump_flags & TDF_DETAILS))
5118 fprintf (dump_file, "Modifying stmt:\n ");
5119 print_gimple_stmt (dump_file, stmt, 0, 0);
5121 gimple_assign_set_rhs_from_tree (&gsi, val);
5122 update_stmt (stmt);
5124 if (dump_file && (dump_flags & TDF_DETAILS))
5126 fprintf (dump_file, "into:\n ");
5127 print_gimple_stmt (dump_file, stmt, 0, 0);
5128 fprintf (dump_file, "\n");
5131 *m_something_changed = true;
5132 if (maybe_clean_eh_stmt (stmt)
5133 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5134 *m_cfg_changed = true;
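/* An illustrative sketch of the rewrite performed above (hypothetical
   GIMPLE, not from a dump): given an aggregate replacement 0[32]=7 for a
   by-value aggregate parameter s, a load such as

     x_1 = s.b;   <-- s.b lives at bit offset 32

   becomes

     x_1 = 7;

   with dead EH edges purged if the original load could throw.  */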
5139 /* IPCP transformation phase applying the propagated aggregate values to the body of NODE. */
5141 unsigned int
5142 ipcp_transform_function (struct cgraph_node *node)
5144 vec<ipa_param_descriptor> descriptors = vNULL;
5145 struct func_body_info fbi;
5146 struct ipa_agg_replacement_value *aggval;
5147 int param_count;
5148 bool cfg_changed = false, something_changed = false;
5150 gcc_checking_assert (cfun);
5151 gcc_checking_assert (current_function_decl);
5153 if (dump_file)
5154 fprintf (dump_file, "Modification phase of node %s/%i\n",
5155 node->name (), node->order);
5157 aggval = ipa_get_agg_replacements_for_node (node);
5158 if (!aggval)
5159 return 0;
5160 param_count = count_formal_params (node->decl);
5161 if (param_count == 0)
5162 return 0;
5163 adjust_agg_replacement_values (node, aggval);
5164 if (dump_file)
5165 ipa_dump_agg_replacement_values (dump_file, aggval);
5167 fbi.node = node;
5168 fbi.info = NULL;
5169 fbi.bb_infos = vNULL;
5170 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5171 fbi.param_count = param_count;
5172 fbi.aa_walked = 0;
5174 descriptors.safe_grow_cleared (param_count);
5175 ipa_populate_param_decls (node, descriptors);
5176 calculate_dominance_info (CDI_DOMINATORS);
5177 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5178 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5180 int i;
5181 struct ipa_bb_info *bi;
5182 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5183 free_ipa_bb_info (bi);
5184 fbi.bb_infos.release ();
5185 free_dominance_info (CDI_DOMINATORS);
5186 (*ipa_node_agg_replacements)[node->uid] = NULL;
5187 descriptors.release ();
5189 if (!something_changed)
5190 return 0;
5191 else if (cfg_changed)
5192 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5193 else
5194 return TODO_update_ssa_only_virtuals;