gcc/ipa-prop.c
1 /* Interprocedural analyses.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tree.h"
24 #include "predict.h"
25 #include "vec.h"
26 #include "hashtab.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "tm.h"
30 #include "hard-reg-set.h"
31 #include "input.h"
32 #include "function.h"
33 #include "dominance.h"
34 #include "cfg.h"
35 #include "basic-block.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
39 #include "tree-eh.h"
40 #include "gimple-expr.h"
41 #include "is-a.h"
42 #include "gimple.h"
43 #include "expr.h"
44 #include "stor-layout.h"
45 #include "print-tree.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
50 #include "langhooks.h"
51 #include "target.h"
52 #include "hash-map.h"
53 #include "plugin-api.h"
54 #include "ipa-ref.h"
55 #include "cgraph.h"
56 #include "alloc-pool.h"
57 #include "ipa-prop.h"
58 #include "bitmap.h"
59 #include "gimple-ssa.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-into-ssa.h"
64 #include "tree-dfa.h"
65 #include "tree-pass.h"
66 #include "tree-inline.h"
67 #include "ipa-inline.h"
68 #include "flags.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "lto-streamer.h"
72 #include "data-streamer.h"
73 #include "tree-streamer.h"
74 #include "params.h"
75 #include "ipa-utils.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "dbgcnt.h"
79 #include "domwalk.h"
80 #include "builtins.h"
81 #include "calls.h"
83 /* Intermediate information that we get from alias analysis about a particular
84 parameter in a particular basic_block. When a parameter or the memory it
85 references is marked modified, we use that information in all dominated
86 blocks without consulting the alias analysis oracle. */
88 struct param_aa_status
90 /* Set when this structure contains meaningful information. If not, the
91 structure describing a dominating BB should be used instead. */
92 bool valid;
94 /* Whether we have seen something which might have modified the data in
95 question. PARM is for the parameter itself, REF is for data it points to
96 but using the alias type of individual accesses and PT is the same thing
97 but for computing aggregate pass-through functions using a very inclusive
98 ao_ref. */
99 bool parm_modified, ref_modified, pt_modified;
102 /* Information related to a given BB that is used only when looking at function
103 body. */
105 struct ipa_bb_info
107 /* Call graph edges going out of this BB. */
108 vec<cgraph_edge *> cg_edges;
109 /* Alias analysis statuses of each formal parameter at this bb. */
110 vec<param_aa_status> param_aa_statuses;
113 /* Structure with global information that is only used when looking at function
114 body. */
116 struct func_body_info
118 /* The node that is being analyzed. */
119 cgraph_node *node;
121 /* Its info. */
122 struct ipa_node_params *info;
124 /* Information about individual BBs. */
125 vec<ipa_bb_info> bb_infos;
127 /* Number of parameters. */
128 int param_count;
130 /* Number of statements already walked when analyzing this function. */
131 unsigned int aa_walked;
134 /* Vector where the parameter infos are actually stored. */
135 vec<ipa_node_params> ipa_node_params_vector;
136 /* Vector of known aggregate values in cloned nodes. */
137 vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
138 /* Vector where the argument infos are actually stored. */
139 vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
141 /* Holders of ipa cgraph hooks: */
142 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
143 static struct cgraph_node_hook_list *node_removal_hook_holder;
144 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
145 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 static struct cgraph_node_hook_list *function_insertion_hook_holder;
148 /* Description of a reference to an IPA constant. */
149 struct ipa_cst_ref_desc
151 /* Edge that corresponds to the statement which took the reference. */
152 struct cgraph_edge *cs;
153 /* Linked list of duplicates created when call graph edges are cloned. */
154 struct ipa_cst_ref_desc *next_duplicate;
155 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
156 is out of control. */
157 int refcount;
160 /* Allocation pool for reference descriptions. */
162 static alloc_pool ipa_refdesc_pool;
164 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
165 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
167 static bool
168 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
170 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
171 struct cl_optimization *os;
173 if (!fs_opts)
174 return false;
175 os = TREE_OPTIMIZATION (fs_opts);
176 return !os->x_optimize || !os->x_flag_ipa_cp;
179 /* Return index of the formal whose tree is PTREE in function which corresponds
180 to INFO. */
182 static int
183 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
185 int i, count;
187 count = descriptors.length ();
188 for (i = 0; i < count; i++)
189 if (descriptors[i].decl == ptree)
190 return i;
192 return -1;
195 /* Return index of the formal whose tree is PTREE in function which corresponds
196 to INFO. */
199 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
201 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
204 /* Populate the decl field in parameter DESCRIPTORS that correspond to
205 NODE. */
207 static void
208 ipa_populate_param_decls (struct cgraph_node *node,
209 vec<ipa_param_descriptor> &descriptors)
211 tree fndecl;
212 tree fnargs;
213 tree parm;
214 int param_num;
216 fndecl = node->decl;
217 gcc_assert (gimple_has_body_p (fndecl));
218 fnargs = DECL_ARGUMENTS (fndecl);
219 param_num = 0;
220 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
222 descriptors[param_num].decl = parm;
223 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
224 true);
225 param_num++;
229 /* Return how many formal parameters FNDECL has. */
232 count_formal_params (tree fndecl)
234 tree parm;
235 int count = 0;
236 gcc_assert (gimple_has_body_p (fndecl));
238 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
239 count++;
241 return count;
244 /* Dump the declaration of the Ith formal parameter of the function
245 corresponding to INFO to FILE, preceded by the parameter's index. */
248 void
249 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
251 fprintf (file, "param #%i", i);
252 if (info->descriptors[i].decl)
254 fprintf (file, " ");
255 print_generic_expr (file, info->descriptors[i].decl, 0);
259 /* Initialize the ipa_node_params structure associated with NODE
260 to hold PARAM_COUNT parameters. */
262 void
263 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
265 struct ipa_node_params *info = IPA_NODE_REF (node);
267 if (!info->descriptors.exists () && param_count)
268 info->descriptors.safe_grow_cleared (param_count);
271 /* Initialize the ipa_node_params structure associated with NODE by counting
272 the function parameters, creating the descriptors and populating their
273 param_decls. */
275 void
276 ipa_initialize_node_params (struct cgraph_node *node)
278 struct ipa_node_params *info = IPA_NODE_REF (node);
280 if (!info->descriptors.exists ())
282 ipa_alloc_node_params (node, count_formal_params (node->decl));
283 ipa_populate_param_decls (node, info->descriptors);
287 /* Print the jump functions associated with call graph edge CS to file F. */
289 static void
290 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
292 int i, count;
294 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
295 for (i = 0; i < count; i++)
297 struct ipa_jump_func *jump_func;
298 enum jump_func_type type;
300 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
301 type = jump_func->type;
303 fprintf (f, " param %d: ", i);
304 if (type == IPA_JF_UNKNOWN)
305 fprintf (f, "UNKNOWN\n");
306 else if (type == IPA_JF_CONST)
308 tree val = jump_func->value.constant.value;
309 fprintf (f, "CONST: ");
310 print_generic_expr (f, val, 0);
311 if (TREE_CODE (val) == ADDR_EXPR
312 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
314 fprintf (f, " -> ");
315 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
318 fprintf (f, "\n");
320 else if (type == IPA_JF_PASS_THROUGH)
322 fprintf (f, "PASS THROUGH: ");
323 fprintf (f, "%d, op %s",
324 jump_func->value.pass_through.formal_id,
325 get_tree_code_name (jump_func->value.pass_through.operation));
326 if (jump_func->value.pass_through.operation != NOP_EXPR)
328 fprintf (f, " ");
329 print_generic_expr (f,
330 jump_func->value.pass_through.operand, 0);
332 if (jump_func->value.pass_through.agg_preserved)
333 fprintf (f, ", agg_preserved");
334 fprintf (f, "\n");
336 else if (type == IPA_JF_ANCESTOR)
338 fprintf (f, "ANCESTOR: ");
339 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC,
340 jump_func->value.ancestor.formal_id,
341 jump_func->value.ancestor.offset);
342 if (jump_func->value.ancestor.agg_preserved)
343 fprintf (f, ", agg_preserved");
344 fprintf (f, "\n");
347 if (jump_func->agg.items)
349 struct ipa_agg_jf_item *item;
350 int j;
352 fprintf (f, " Aggregate passed by %s:\n",
353 jump_func->agg.by_ref ? "reference" : "value");
354 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
356 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
357 item->offset);
358 if (TYPE_P (item->value))
359 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
360 tree_to_uhwi (TYPE_SIZE (item->value)));
361 else
363 fprintf (f, "cst: ");
364 print_generic_expr (f, item->value, 0);
366 fprintf (f, "\n");
370 struct ipa_polymorphic_call_context *ctx
371 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
372 if (ctx && !ctx->useless_p ())
374 fprintf (f, " Context: ");
375 ctx->dump (dump_file);
381 /* Print the jump functions of all arguments on all call graph edges going from
382 NODE to file F. */
384 void
385 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
387 struct cgraph_edge *cs;
389 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
390 node->order);
391 for (cs = node->callees; cs; cs = cs->next_callee)
393 if (!ipa_edge_args_info_available_for_edge_p (cs))
394 continue;
396 fprintf (f, " callsite %s/%i -> %s/%i : \n",
397 xstrdup (node->name ()), node->order,
398 xstrdup (cs->callee->name ()),
399 cs->callee->order);
400 ipa_print_node_jump_functions_for_edge (f, cs);
403 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
405 struct cgraph_indirect_call_info *ii;
406 if (!ipa_edge_args_info_available_for_edge_p (cs))
407 continue;
409 ii = cs->indirect_info;
410 if (ii->agg_contents)
411 fprintf (f, " indirect %s callsite, calling param %i, "
412 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
413 ii->member_ptr ? "member ptr" : "aggregate",
414 ii->param_index, ii->offset,
415 ii->by_ref ? "by reference" : "by value");
416 else
417 fprintf (f, " indirect %s callsite, calling param %i, "
418 "offset " HOST_WIDE_INT_PRINT_DEC,
419 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
420 ii->offset);
422 if (cs->call_stmt)
424 fprintf (f, ", for stmt ");
425 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
427 else
428 fprintf (f, "\n");
429 if (ii->polymorphic)
430 ii->context.dump (f);
431 ipa_print_node_jump_functions_for_edge (f, cs);
435 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
437 void
438 ipa_print_all_jump_functions (FILE *f)
440 struct cgraph_node *node;
442 fprintf (f, "\nJump functions:\n");
443 FOR_EACH_FUNCTION (node)
445 ipa_print_node_jump_functions (f, node);
449 /* Set DST to be a copy of another constant jump function SRC (to be used by
450 jump function combination code). The two functions will share their rdesc. */
452 static void
453 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
454 struct ipa_jump_func *src)
457 gcc_checking_assert (src->type == IPA_JF_CONST);
458 dst->type = IPA_JF_CONST;
459 dst->value.constant = src->value.constant;
462 /* Set JFUNC to be a constant jump function. */
464 static void
465 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
466 struct cgraph_edge *cs)
468 constant = unshare_expr (constant);
469 if (constant && EXPR_P (constant))
470 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
471 jfunc->type = IPA_JF_CONST;
472 jfunc->value.constant.value = unshare_expr_without_location (constant);
474 if (TREE_CODE (constant) == ADDR_EXPR
475 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
477 struct ipa_cst_ref_desc *rdesc;
478 if (!ipa_refdesc_pool)
479 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
480 sizeof (struct ipa_cst_ref_desc), 32);
482 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
483 rdesc->cs = cs;
484 rdesc->next_duplicate = NULL;
485 rdesc->refcount = 1;
486 jfunc->value.constant.rdesc = rdesc;
488 else
489 jfunc->value.constant.rdesc = NULL;
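/* For illustration (a hypothetical caller, not from the original sources):
   given

     extern void bar (void (*) (void));
     static void callback (void) {}
     void foo (void) { bar (&callback); }

   the argument &callback is a gimple IP invariant, so the jump function of
   bar's first argument becomes IPA_JF_CONST, and because the constant is the
   address of a FUNCTION_DECL, an rdesc with refcount one is attached to
   track the IPA reference this call takes on callback. */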
492 /* Set JFUNC to be a simple pass-through jump function. */
493 static void
494 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
495 bool agg_preserved)
497 jfunc->type = IPA_JF_PASS_THROUGH;
498 jfunc->value.pass_through.operand = NULL_TREE;
499 jfunc->value.pass_through.formal_id = formal_id;
500 jfunc->value.pass_through.operation = NOP_EXPR;
501 jfunc->value.pass_through.agg_preserved = agg_preserved;
504 /* Set JFUNC to be an arithmetic pass through jump function. */
506 static void
507 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
508 tree operand, enum tree_code operation)
510 jfunc->type = IPA_JF_PASS_THROUGH;
511 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
512 jfunc->value.pass_through.formal_id = formal_id;
513 jfunc->value.pass_through.operation = operation;
514 jfunc->value.pass_through.agg_preserved = false;
517 /* Set JFUNC to be an ancestor jump function. */
519 static void
520 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
521 int formal_id, bool agg_preserved)
523 jfunc->type = IPA_JF_ANCESTOR;
524 jfunc->value.ancestor.formal_id = formal_id;
525 jfunc->value.ancestor.offset = offset;
526 jfunc->value.ancestor.agg_preserved = agg_preserved;
529 /* Get IPA BB information about the given BB. FBI is the context of analysis
530 of this function body. */
532 static struct ipa_bb_info *
533 ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
535 gcc_checking_assert (fbi);
536 return &fbi->bb_infos[bb->index];
539 /* Structure to be passed in between detect_type_change and
540 check_stmt_for_type_change. */
542 struct prop_type_change_info
544 /* Offset into the object where there is the virtual method pointer we are
545 looking for. */
546 HOST_WIDE_INT offset;
547 /* The declaration or SSA_NAME pointer of the base that we are checking for
548 type change. */
549 tree object;
550 /* Set to true if dynamic type change has been detected. */
551 bool type_maybe_changed;
554 /* Return true if STMT can modify a virtual method table pointer.
556 This function makes special assumptions about both constructors and
557 destructors which are all the functions that are allowed to alter the VMT
558 pointers. It assumes that destructors begin with assignment into all VMT
559 pointers and that constructors essentially work in the following way:
561 1) The very first thing they do is call constructors of ancestor
562 sub-objects that have them.
564 2) Then the VMT pointers of this object and all its ancestors are set to new
565 values corresponding to the type of the constructor.
567 3) Only afterwards, other stuff such as constructors of member sub-objects
568 and the code written by the user is run. Only this may include calling
569 virtual functions, directly or indirectly.
571 There is no way to call a constructor of an ancestor sub-object in any
572 other way.
574 This means that we do not have to care whether constructors get the correct
575 type information because they will always change it (in fact, if we define
576 the type to be given by the VMT pointer, it is undefined).
578 The most important fact to derive from the above is that if, for some
579 statement in section 3, we try to detect whether the dynamic type has
580 changed, we can safely ignore all calls as we examine the function body
581 backwards until we reach statements in section 2 because these calls cannot
582 be ancestor constructors or destructors (if the input is not bogus) and so
583 do not change the dynamic type (this holds true only for automatically
584 allocated objects but at the moment we devirtualize only these). We then
585 must detect that statements in section 2 change the dynamic type and can try
586 to derive the new type. That is enough and we can stop, we will never see
587 the calls into constructors of sub-objects in this code. Therefore we can
588 safely ignore all call statements that we traverse.
591 static bool
592 stmt_may_be_vtbl_ptr_store (gimple stmt)
594 if (is_gimple_call (stmt))
595 return false;
596 if (gimple_clobber_p (stmt))
597 return false;
598 else if (is_gimple_assign (stmt))
600 tree lhs = gimple_assign_lhs (stmt);
602 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
604 if (flag_strict_aliasing
605 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
606 return false;
608 if (TREE_CODE (lhs) == COMPONENT_REF
609 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
610 return false;
611 /* In the future we might want to use get_base_ref_and_offset to find
612 if there is a field corresponding to the offset and if so, proceed
613 almost like if it was a component ref. */
616 return true;
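/* To make the assumptions above concrete, consider this hand-written C++
   sketch (illustrative only, not from the original sources):

     struct A { virtual void f (); int i; };
     struct B : A { virtual void f (); int j; };

     B::B () : A ()   // 1) ancestor sub-object constructors run first
     {
       // 2) stores of the VMT pointers for the B object and its A
       //    sub-object happen here, fixing the dynamic type to B
       j = 0;         // 3) only now do member initializers and user code run
     }

   Walking backwards from a statement in phase 3, any call encountered cannot
   be an ancestor constructor or destructor of this object, which is why call
   statements may be ignored by the walk. */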
619 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
620 to check whether a particular statement may modify the virtual table
621 pointerIt stores its result into DATA, which points to a
622 prop_type_change_info structure. */
624 static bool
625 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
627 gimple stmt = SSA_NAME_DEF_STMT (vdef);
628 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
630 if (stmt_may_be_vtbl_ptr_store (stmt))
632 tci->type_maybe_changed = true;
633 return true;
635 else
636 return false;
639 /* See if ARG is a PARM_DECL describing an instance passed by pointer
640 or reference in FUNCTION. Return true if the dynamic type may change
641 between the beginning of the function and the point where CALL is invoked.
643 Generally, functions are not allowed to change the dynamic type of such
644 instances, but they may call destructors. We assume that methods cannot
645 destroy the THIS pointer. Also, as special cases, constructors and
646 destructors may change the type of the THIS pointer. */
648 static bool
649 param_type_may_change_p (tree function, tree arg, gimple call)
651 /* Pure functions cannot change the dynamic type;
652 that requires writing to memory. */
653 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
654 return false;
655 /* We need to check if we are within an inlined constructor
656 or destructor (ideally we would have a way to check that the
657 inlined cdtor is actually working on ARG, but we don't have
658 an easy way to tell, so punt on all non-pure cdtors).
659 We may also record the types of cdtors and, once we know the type
660 of the instance, match them.
662 Also, code unification optimizations may merge calls from
663 different blocks, making return values unreliable. So
664 do nothing during late optimization. */
665 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
666 return true;
667 if (TREE_CODE (arg) == SSA_NAME
668 && SSA_NAME_IS_DEFAULT_DEF (arg)
669 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
671 /* Normal (non-THIS) argument. */
672 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
673 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
674 /* THIS pointer of a method - here we want to watch constructors
675 and destructors as those definitely may change the dynamic
676 type. */
677 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
678 && !DECL_CXX_CONSTRUCTOR_P (function)
679 && !DECL_CXX_DESTRUCTOR_P (function)
680 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
682 /* Walk the inline stack and watch out for ctors/dtors. */
683 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
684 block = BLOCK_SUPERCONTEXT (block))
685 if (BLOCK_ABSTRACT_ORIGIN (block)
686 && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
688 tree fn = BLOCK_ABSTRACT_ORIGIN (block);
690 if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
691 continue;
692 if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
693 && (DECL_CXX_CONSTRUCTOR_P (fn)
694 || DECL_CXX_DESTRUCTOR_P (fn)))
695 return true;
697 return false;
700 return true;
703 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
704 callsite CALL) by looking for assignments to its virtual table pointer. If
705 it has, return true and fill in the jump function JFUNC with relevant type
706 information or set it to unknown. ARG is the object itself (not a pointer
707 to it, unless dereferenced). BASE is the base of the memory access as
708 returned by get_ref_base_and_extent, as is the offset.
710 This is helper function for detect_type_change and detect_type_change_ssa
711 that does the heavy work which is usually unnecessary. */
713 static bool
714 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
715 gcall *call, struct ipa_jump_func *jfunc,
716 HOST_WIDE_INT offset)
718 struct prop_type_change_info tci;
719 ao_ref ao;
720 bool entry_reached = false;
722 gcc_checking_assert (DECL_P (arg)
723 || TREE_CODE (arg) == MEM_REF
724 || handled_component_p (arg));
726 comp_type = TYPE_MAIN_VARIANT (comp_type);
728 /* Const calls cannot call virtual methods through VMT and so type changes do
729 not matter. */
730 if (!flag_devirtualize || !gimple_vuse (call)
731 /* Be sure expected_type is polymorphic. */
732 || !comp_type
733 || TREE_CODE (comp_type) != RECORD_TYPE
734 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
735 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
736 return true;
738 ao_ref_init (&ao, arg);
739 ao.base = base;
740 ao.offset = offset;
741 ao.size = POINTER_SIZE;
742 ao.max_size = ao.size;
744 tci.offset = offset;
745 tci.object = get_base_address (arg);
746 tci.type_maybe_changed = false;
748 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
749 &tci, NULL, &entry_reached);
750 if (!tci.type_maybe_changed)
751 return false;
753 jfunc->type = IPA_JF_UNKNOWN;
754 return true;
757 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
758 If it may have, return true and fill in the jump function JFUNC with relevant type
759 information or set it to unknown. ARG is the object itself (not a pointer
760 to it, unless dereferenced). BASE is the base of the memory access as
761 returned by get_ref_base_and_extent, as is the offset. */
763 static bool
764 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
765 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
767 if (!flag_devirtualize)
768 return false;
770 if (TREE_CODE (base) == MEM_REF
771 && !param_type_may_change_p (current_function_decl,
772 TREE_OPERAND (base, 0),
773 call))
774 return false;
775 return detect_type_change_from_memory_writes (arg, base, comp_type,
776 call, jfunc, offset);
779 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
780 SSA name (its dereference will become the base and the offset is assumed to
781 be zero). */
783 static bool
784 detect_type_change_ssa (tree arg, tree comp_type,
785 gcall *call, struct ipa_jump_func *jfunc)
787 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
788 if (!flag_devirtualize
789 || !POINTER_TYPE_P (TREE_TYPE (arg)))
790 return false;
792 if (!param_type_may_change_p (current_function_decl, arg, call))
793 return false;
795 arg = build2 (MEM_REF, ptr_type_node, arg,
796 build_int_cst (ptr_type_node, 0));
798 return detect_type_change_from_memory_writes (arg, arg, comp_type,
799 call, jfunc, 0);
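/* The kind of situation this machinery guards against, as a hypothetical
   C++ fragment (illustrative only):

     struct A { virtual void f (); };
     struct B : A { virtual void f (); };

     void foo (A *a)
     {
       a->f ();     // no VMT store reaches this point; the dynamic type
                    // observed at the call is trustworthy
       a->~A ();
       new (a) B;   // placement new stores a new VMT pointer...
       a->f ();     // ...so a type change must be assumed here
     }

   walk_aliased_vdefs walks the virtual definitions backwards from the call
   and check_stmt_for_type_change flags the VMT store made by the placement
   new. */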
802 /* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
803 boolean variable pointed to by DATA. */
805 static bool
806 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
807 void *data)
809 bool *b = (bool *) data;
810 *b = true;
811 return true;
814 /* Return true if we have already walked so many statements in AA that we
815 should really just start giving up. */
817 static bool
818 aa_overwalked (struct func_body_info *fbi)
820 gcc_checking_assert (fbi);
821 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
824 /* Find the nearest valid aa status for parameter specified by INDEX that
825 dominates BB. */
827 static struct param_aa_status *
828 find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
829 int index)
831 while (true)
833 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
834 if (!bb)
835 return NULL;
836 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
837 if (!bi->param_aa_statuses.is_empty ()
838 && bi->param_aa_statuses[index].valid)
839 return &bi->param_aa_statuses[index];
843 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
844 structures and/or initialize the result with a dominating description as
845 necessary. */
847 static struct param_aa_status *
848 parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
849 int index)
851 gcc_checking_assert (fbi);
852 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
853 if (bi->param_aa_statuses.is_empty ())
854 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
855 struct param_aa_status *paa = &bi->param_aa_statuses[index];
856 if (!paa->valid)
858 gcc_checking_assert (!paa->parm_modified
859 && !paa->ref_modified
860 && !paa->pt_modified);
861 struct param_aa_status *dom_paa;
862 dom_paa = find_dominating_aa_status (fbi, bb, index);
863 if (dom_paa)
864 *paa = *dom_paa;
865 else
866 paa->valid = true;
869 return paa;
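/* A small worked example of the caching scheme (illustrative only): if BB2
   dominates BB3 and BB4 and the walk done while processing a statement in
   BB2 found no modification of parameter 0, then

     BB2   paa[0] = { valid = true, parm_modified = false }
      |  \
     BB3  BB4

   both BB3 and BB4 simply inherit BB2's status via find_dominating_aa_status
   on their first query, and the alias oracle is not consulted again on those
   paths. */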
872 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
873 a value known not to be modified in this function before reaching the
874 statement STMT. FBI holds information about the function gathered so far
875 that does not survive the summary building stage. */
877 static bool
878 parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
879 gimple stmt, tree parm_load)
881 struct param_aa_status *paa;
882 bool modified = false;
883 ao_ref refd;
885 /* FIXME: FBI can be NULL if we are being called from outside
886 ipa_node_analysis or ipcp_transform_function, which currently happens
887 during inlining analysis. It would be great to extend fbi's lifetime and
888 always have it. Currently, we are just not afraid of too much walking in
889 that case. */
890 if (fbi)
892 if (aa_overwalked (fbi))
893 return false;
894 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
895 if (paa->parm_modified)
896 return false;
898 else
899 paa = NULL;
901 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
902 ao_ref_init (&refd, parm_load);
903 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
904 &modified, NULL);
905 if (fbi)
906 fbi->aa_walked += walked;
907 if (paa && modified)
908 paa->parm_modified = true;
909 return !modified;
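/* For example (hypothetical source and gimple-like pseudocode, not from the
   original sources):

     int foo (int a)
     {
       use (&a);       // 'a' is addressable, so it lives in memory
       a.0_2 = a;      // load from the PARM_DECL
       return a.0_2;
     }

   The load is preserved only if no aliased store between the start of the
   function and the load may clobber 'a'; here the call use (&a) may do so,
   so mark_modified fires and the predicate returns false. */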
912 /* If STMT is an assignment that loads a value from a parameter declaration,
913 return the index of the parameter in ipa_node_params which has not been
914 modified. Otherwise return -1. */
916 static int
917 load_from_unmodified_param (struct func_body_info *fbi,
918 vec<ipa_param_descriptor> descriptors,
919 gimple stmt)
921 int index;
922 tree op1;
924 if (!gimple_assign_single_p (stmt))
925 return -1;
927 op1 = gimple_assign_rhs1 (stmt);
928 if (TREE_CODE (op1) != PARM_DECL)
929 return -1;
931 index = ipa_get_param_decl_index_1 (descriptors, op1);
932 if (index < 0
933 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
934 return -1;
936 return index;
939 /* Return true if memory reference REF (which must be a load through parameter
940 with INDEX) loads data that are known to be unmodified in this function
941 before reaching statement STMT. */
943 static bool
944 parm_ref_data_preserved_p (struct func_body_info *fbi,
945 int index, gimple stmt, tree ref)
947 struct param_aa_status *paa;
948 bool modified = false;
949 ao_ref refd;
951 /* FIXME: FBI can be NULL if we are being called from outside
952 ipa_node_analysis or ipcp_transform_function, which currently happens
953 during inlining analysis. It would be great to extend fbi's lifetime and
954 always have it. Currently, we are just not afraid of too much walking in
955 that case. */
956 if (fbi)
958 if (aa_overwalked (fbi))
959 return false;
960 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
961 if (paa->ref_modified)
962 return false;
964 else
965 paa = NULL;
967 gcc_checking_assert (gimple_vuse (stmt));
968 ao_ref_init (&refd, ref);
969 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
970 &modified, NULL);
971 if (fbi)
972 fbi->aa_walked += walked;
973 if (paa && modified)
974 paa->ref_modified = true;
975 return !modified;
978 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
979 is known to be unmodified in this function before reaching call statement
980 CALL into which it is passed. FBI describes the function body. */
982 static bool
983 parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
984 gimple call, tree parm)
986 bool modified = false;
987 ao_ref refd;
989 /* It's unnecessary to calculate anything about memory contents for a const
990 function because it is not going to use it. But do not cache the result
991 either. Also, no such calculations for non-pointers. */
992 if (!gimple_vuse (call)
993 || !POINTER_TYPE_P (TREE_TYPE (parm))
994 || aa_overwalked (fbi))
995 return false;
997 struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
998 index);
999 if (paa->pt_modified)
1000 return false;
1002 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1003 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1004 &modified, NULL);
1005 fbi->aa_walked += walked;
1006 if (modified)
1007 paa->pt_modified = true;
1008 return !modified;
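/* Two illustrative (hypothetical) callers:

     void foo (struct S *p) { p->x = 1; bar (p); }   // *p clobbered before
                                                     // the call: no pass-through
     void baz (struct S *p) { bar (p); }             // *p untouched: aggregate
                                                     // contents pass through

   Because the ao_ref is built from just the pointer with no size, any store
   that may reach *p conservatively counts as a modification. */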
1011 /* Return true if we can prove that OP is a memory reference loading unmodified
1012 data from an aggregate passed as a parameter and if the aggregate is passed
1013 by reference, that the alias type of the load corresponds to the type of the
1014 formal parameter (so that we can rely on this type for TBAA in callers).
1015 INFO and PARMS_AINFO describe parameters of the current function (but the
1016 latter can be NULL), STMT is the load statement. If function returns true,
1017 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
1018 within the aggregate and whether it is a load from a value passed by
1019 reference, respectively. */
1021 static bool
1022 ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1023 vec<ipa_param_descriptor> descriptors,
1024 gimple stmt, tree op, int *index_p,
1025 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1026 bool *by_ref_p)
1028 int index;
1029 HOST_WIDE_INT size, max_size;
1030 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1032 if (max_size == -1 || max_size != size || *offset_p < 0)
1033 return false;
1035 if (DECL_P (base))
1037 int index = ipa_get_param_decl_index_1 (descriptors, base);
1038 if (index >= 0
1039 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1041 *index_p = index;
1042 *by_ref_p = false;
1043 if (size_p)
1044 *size_p = size;
1045 return true;
1047 return false;
1050 if (TREE_CODE (base) != MEM_REF
1051 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1052 || !integer_zerop (TREE_OPERAND (base, 1)))
1053 return false;
1055 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1057 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1058 index = ipa_get_param_decl_index_1 (descriptors, parm);
1060 else
1062 /* This branch catches situations where a pointer parameter is not a
1063 gimple register, for example:
1065 void hip7(S*) (struct S * p)
1067 void (*<T2e4>) (struct S *) D.1867;
1068 struct S * p.1;
1070 <bb 2>:
1071 p.1_1 = p;
1072 D.1867_2 = p.1_1->f;
1073 D.1867_2 ();
1074 gdp = &p;
1077 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1078 index = load_from_unmodified_param (fbi, descriptors, def);
1081 if (index >= 0
1082 && parm_ref_data_preserved_p (fbi, index, stmt, op))
1084 *index_p = index;
1085 *by_ref_p = true;
1086 if (size_p)
1087 *size_p = size;
1088 return true;
1090 return false;
1093 /* Just like the previous function, but without the func_body_info
1094 pointer, for users outside of this file. */
1096 bool
1097 ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
1098 tree op, int *index_p, HOST_WIDE_INT *offset_p,
1099 bool *by_ref_p)
1101 return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
1102 offset_p, NULL, by_ref_p);
1105 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1106 of an assignment statement STMT, try to determine whether we are actually
1107 handling any of the following cases and construct an appropriate jump
1108 function into JFUNC if so:
1110 1) The passed value is loaded from a formal parameter which is not a gimple
1111 register (most probably because it is addressable, the value has to be
1112 scalar) and we can guarantee the value has not changed. This case can
1113 therefore be described by a simple pass-through jump function. For example:
1115 foo (int a)
1117 int a.0;
1119 a.0_2 = a;
1120 bar (a.0_2);
1122 2) The passed value can be described by a simple arithmetic pass-through
1123 jump function. E.g.
1125 foo (int a)
1127 int D.2064;
1129 D.2064_4 = a.1(D) + 4;
1130 bar (D.2064_4);
1132 This case can also occur in combination of the previous one, e.g.:
1134 foo (int a, int z)
1136 int a.0;
1137 int D.2064;
1139 a.0_3 = a;
1140 D.2064_4 = a.0_3 + 4;
1141 foo (D.2064_4);
1143 3) The passed value is an address of an object within another one (which
1144 also passed by reference). Such situations are described by an ancestor
1145 jump function and describe situations such as:
1147 B::foo() (struct B * const this)
1149 struct A * D.1845;
1151 D.1845_2 = &this_1(D)->D.1748;
1152 A::bar (D.1845_2);
1154 INFO is the structure describing individual parameters across different
1155 stages of IPA optimizations. PARMS_AINFO contains the information that is
1156 only needed for intraprocedural analysis. */
1158 static void
1159 compute_complex_assign_jump_func (struct func_body_info *fbi,
1160 struct ipa_node_params *info,
1161 struct ipa_jump_func *jfunc,
1162 gcall *call, gimple stmt, tree name,
1163 tree param_type)
1165 HOST_WIDE_INT offset, size, max_size;
1166 tree op1, tc_ssa, base, ssa;
1167 int index;
1169 op1 = gimple_assign_rhs1 (stmt);
1171 if (TREE_CODE (op1) == SSA_NAME)
1173 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1174 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1175 else
1176 index = load_from_unmodified_param (fbi, info->descriptors,
1177 SSA_NAME_DEF_STMT (op1));
1178 tc_ssa = op1;
1180 else
1182 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1183 tc_ssa = gimple_assign_lhs (stmt);
1186 if (index >= 0)
1188 tree op2 = gimple_assign_rhs2 (stmt);
1190 if (op2)
1192 if (!is_gimple_ip_invariant (op2)
1193 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1194 && !useless_type_conversion_p (TREE_TYPE (name),
1195 TREE_TYPE (op1))))
1196 return;
1198 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1199 gimple_assign_rhs_code (stmt));
1201 else if (gimple_assign_single_p (stmt))
1203 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
1204 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1206 return;
1209 if (TREE_CODE (op1) != ADDR_EXPR)
1210 return;
1211 op1 = TREE_OPERAND (op1, 0);
1212 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1213 return;
1214 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1215 if (TREE_CODE (base) != MEM_REF
1216 /* If this is a varying address, punt. */
1217 || max_size == -1
1218 || max_size != size)
1219 return;
1220 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
1221 ssa = TREE_OPERAND (base, 0);
1222 if (TREE_CODE (ssa) != SSA_NAME
1223 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1224 || offset < 0)
1225 return;
1227 /* Dynamic types are changed in constructors and destructors. */
1228 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1229 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1230 ipa_set_ancestor_jf (jfunc, offset, index,
1231 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1234 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1235 it looks like:
1237 iftmp.1_3 = &obj_2(D)->D.1762;
1239 The base of the MEM_REF must be a default definition SSA NAME of a
1240 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1241 whole MEM_REF expression is returned and the offset calculated from any
1242 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1243 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1245 static tree
1246 get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1248 HOST_WIDE_INT size, max_size;
1249 tree expr, parm, obj;
1251 if (!gimple_assign_single_p (assign))
1252 return NULL_TREE;
1253 expr = gimple_assign_rhs1 (assign);
1255 if (TREE_CODE (expr) != ADDR_EXPR)
1256 return NULL_TREE;
1257 expr = TREE_OPERAND (expr, 0);
1258 obj = expr;
1259 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1261 if (TREE_CODE (expr) != MEM_REF
1262 /* If this is a varying address, punt. */
1263 || max_size == -1
1264 || max_size != size
1265 || *offset < 0)
1266 return NULL_TREE;
1267 parm = TREE_OPERAND (expr, 0);
1268 if (TREE_CODE (parm) != SSA_NAME
1269 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1270 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1271 return NULL_TREE;
1273 *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
1274 *obj_p = obj;
1275 return expr;
1279 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1280 statement PHI, try to find out whether NAME is in fact a
1281 multiple-inheritance typecast from a descendant into an ancestor of a formal
1282 parameter and thus can be described by an ancestor jump function and if so,
1283 write the appropriate function into JFUNC.
1285 Essentially we want to match the following pattern:
1287 if (obj_2(D) != 0B)
1288 goto <bb 3>;
1289 else
1290 goto <bb 4>;
1292 <bb 3>:
1293 iftmp.1_3 = &obj_2(D)->D.1762;
1295 <bb 4>:
1296 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1297 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1298 return D.1879_6; */
1300 static void
1301 compute_complex_ancestor_jump_func (struct func_body_info *fbi,
1302 struct ipa_node_params *info,
1303 struct ipa_jump_func *jfunc,
1304 gcall *call, gphi *phi)
1306 HOST_WIDE_INT offset;
1307 gimple assign, cond;
1308 basic_block phi_bb, assign_bb, cond_bb;
1309 tree tmp, parm, expr, obj;
1310 int index, i;
1312 if (gimple_phi_num_args (phi) != 2)
1313 return;
1315 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1316 tmp = PHI_ARG_DEF (phi, 0);
1317 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1318 tmp = PHI_ARG_DEF (phi, 1);
1319 else
1320 return;
1321 if (TREE_CODE (tmp) != SSA_NAME
1322 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1323 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1324 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1325 return;
1327 assign = SSA_NAME_DEF_STMT (tmp);
1328 assign_bb = gimple_bb (assign);
1329 if (!single_pred_p (assign_bb))
1330 return;
1331 expr = get_ancestor_addr_info (assign, &obj, &offset);
1332 if (!expr)
1333 return;
1334 parm = TREE_OPERAND (expr, 0);
1335 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1336 if (index < 0)
1337 return;
1339 cond_bb = single_pred (assign_bb);
1340 cond = last_stmt (cond_bb);
1341 if (!cond
1342 || gimple_code (cond) != GIMPLE_COND
1343 || gimple_cond_code (cond) != NE_EXPR
1344 || gimple_cond_lhs (cond) != parm
1345 || !integer_zerop (gimple_cond_rhs (cond)))
1346 return;
1348 phi_bb = gimple_bb (phi);
1349 for (i = 0; i < 2; i++)
1351 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1352 if (pred != assign_bb && pred != cond_bb)
1353 return;
1356 ipa_set_ancestor_jf (jfunc, offset, index,
1357 parm_ref_data_pass_through_p (fbi, index, call, parm));
1360 /* Inspect the given TYPE and return true iff it has the same structure (the
1361 same number of fields of the same types) as a C++ member pointer. If
1362 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1363 corresponding fields there. */
1365 static bool
1366 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1368 tree fld;
1370 if (TREE_CODE (type) != RECORD_TYPE)
1371 return false;
1373 fld = TYPE_FIELDS (type);
1374 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1375 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1376 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1377 return false;
1379 if (method_ptr)
1380 *method_ptr = fld;
1382 fld = DECL_CHAIN (fld);
1383 if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1384 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1385 return false;
1386 if (delta)
1387 *delta = fld;
1389 if (DECL_CHAIN (fld))
1390 return false;
1392 return true;
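/* The shape being matched is the Itanium C++ ABI representation of a
   pointer to member function, roughly (a sketch; field names and exact
   types vary between ABIs):

     struct member_fn_ptr
     {
       void (*__pfn) ();   // method pointer, or vtable offset with low bit set
       long __delta;       // adjustment to apply to the this pointer
     };

   The predicate only checks the structural shape: a record whose first
   field is a pointer to method and whose second field is integral. */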
1395 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1396 return the rhs of its defining statement. Otherwise return RHS as it
1397 is. */
1399 static inline tree
1400 get_ssa_def_if_simple_copy (tree rhs)
1402 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1404 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1406 if (gimple_assign_single_p (def_stmt))
1407 rhs = gimple_assign_rhs1 (def_stmt);
1408 else
1409 break;
1411 return rhs;
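/* E.g., in gimple-like pseudocode (hypothetical):

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-operand copies and
   returns a_1; a default definition or any non-copy statement stops the
   walk. */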
1414 /* Simple linked list, describing known contents of an aggregate before
1415 the call. */
1417 struct ipa_known_agg_contents_list
1419 /* Offset and size of the described part of the aggregate. */
1420 HOST_WIDE_INT offset, size;
1421 /* Known constant value or NULL if the contents is known to be unknown. */
1422 tree constant;
1423 /* Pointer to the next structure in the list. */
1424 struct ipa_known_agg_contents_list *next;
1427 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1428 structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
1429 unless there is a partial overlap, in which case return NULL, or unless such
1430 an element is already there, in which case set *ALREADY_THERE to true. */
1432 static struct ipa_known_agg_contents_list **
1433 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1434 HOST_WIDE_INT lhs_offset,
1435 HOST_WIDE_INT lhs_size,
1436 bool *already_there)
1438 struct ipa_known_agg_contents_list **p = list;
1439 while (*p && (*p)->offset < lhs_offset)
1441 if ((*p)->offset + (*p)->size > lhs_offset)
1442 return NULL;
1443 p = &(*p)->next;
1446 if (*p && (*p)->offset < lhs_offset + lhs_size)
1448 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1449 /* We already know this value is subsequently overwritten with
1450 something else. */
1451 *already_there = true;
1452 else
1453 /* Otherwise this is a partial overlap which we cannot
1454 represent. */
1455 return NULL;
1457 return p;
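/* A worked example (illustrative): with a list already describing bits
   [0, 32) and [64, 96), asking for a place for

     - [32, 64) returns the link between the two existing entries,
     - [0, 32) sets *ALREADY_THERE because an entry with that extent exists,
     - [16, 48) returns NULL because it partially overlaps [0, 32). */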
1460 /* Build aggregate jump function from LIST, assuming there are exactly
1461 CONST_COUNT constant entries there and that the offset of the passed argument
1462 is ARG_OFFSET and store it into JFUNC. */
1464 static void
1465 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1466 int const_count, HOST_WIDE_INT arg_offset,
1467 struct ipa_jump_func *jfunc)
1469 vec_alloc (jfunc->agg.items, const_count);
1470 while (list)
1472 if (list->constant)
1474 struct ipa_agg_jf_item item;
1475 item.offset = list->offset - arg_offset;
1476 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1477 item.value = unshare_expr_without_location (list->constant);
1478 jfunc->agg.items->quick_push (item);
1480 list = list->next;
1484 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1485 in ARG is filled in with constant values. ARG can either be an aggregate
1486 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1487 aggregate. JFUNC is the jump function into which the constants are
1488 subsequently stored. */
1490 static void
1491 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1492 tree arg_type,
1493 struct ipa_jump_func *jfunc)
1495 struct ipa_known_agg_contents_list *list = NULL;
1496 int item_count = 0, const_count = 0;
1497 HOST_WIDE_INT arg_offset, arg_size;
1498 gimple_stmt_iterator gsi;
1499 tree arg_base;
1500 bool check_ref, by_ref;
1501 ao_ref r;
1503 /* The function operates in three stages. First, we prepare check_ref, r,
1504 arg_base and arg_offset based on what is actually passed as an actual
1505 argument. */
1507 if (POINTER_TYPE_P (arg_type))
1509 by_ref = true;
1510 if (TREE_CODE (arg) == SSA_NAME)
1512 tree type_size;
1513 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
1514 return;
1515 check_ref = true;
1516 arg_base = arg;
1517 arg_offset = 0;
1518 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1519 arg_size = tree_to_uhwi (type_size);
1520 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1522 else if (TREE_CODE (arg) == ADDR_EXPR)
1524 HOST_WIDE_INT arg_max_size;
1526 arg = TREE_OPERAND (arg, 0);
1527 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1528 &arg_max_size);
1529 if (arg_max_size == -1
1530 || arg_max_size != arg_size
1531 || arg_offset < 0)
1532 return;
1533 if (DECL_P (arg_base))
1535 check_ref = false;
1536 ao_ref_init (&r, arg_base);
1538 else
1539 return;
1541 else
1542 return;
1544 else
1546 HOST_WIDE_INT arg_max_size;
1548 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1550 by_ref = false;
1551 check_ref = false;
1552 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1553 &arg_max_size);
1554 if (arg_max_size == -1
1555 || arg_max_size != arg_size
1556 || arg_offset < 0)
1557 return;
1559 ao_ref_init (&r, arg);
1562 /* Second stage walks back the BB, looks at individual statements and as long
1563 as it is confident of how the statements affect contents of the
1564 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list structures
1565 describing it. */
1566 gsi = gsi_for_stmt (call);
1567 gsi_prev (&gsi);
1568 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1570 struct ipa_known_agg_contents_list *n, **p;
1571 gimple stmt = gsi_stmt (gsi);
1572 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1573 tree lhs, rhs, lhs_base;
1575 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1576 continue;
1577 if (!gimple_assign_single_p (stmt))
1578 break;
1580 lhs = gimple_assign_lhs (stmt);
1581 rhs = gimple_assign_rhs1 (stmt);
1582 if (!is_gimple_reg_type (TREE_TYPE (rhs))
1583 || TREE_CODE (lhs) == BIT_FIELD_REF
1584 || contains_bitfld_component_ref_p (lhs))
1585 break;
1587 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1588 &lhs_max_size);
1589 if (lhs_max_size == -1
1590 || lhs_max_size != lhs_size)
1591 break;
1593 if (check_ref)
1595 if (TREE_CODE (lhs_base) != MEM_REF
1596 || TREE_OPERAND (lhs_base, 0) != arg_base
1597 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1598 break;
1600 else if (lhs_base != arg_base)
1602 if (DECL_P (lhs_base))
1603 continue;
1604 else
1605 break;
1608 bool already_there = false;
1609 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1610 &already_there);
1611 if (!p)
1612 break;
1613 if (already_there)
1614 continue;
1616 rhs = get_ssa_def_if_simple_copy (rhs);
1617 n = XALLOCA (struct ipa_known_agg_contents_list);
1618 n->size = lhs_size;
1619 n->offset = lhs_offset;
1620 if (is_gimple_ip_invariant (rhs))
1622 n->constant = rhs;
1623 const_count++;
1625 else
1626 n->constant = NULL_TREE;
1627 n->next = *p;
1628 *p = n;
1630 item_count++;
1631 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1632 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1633 break;
1636 /* Third stage just goes over the list and creates an appropriate vector of
1637 ipa_agg_jf_item structures out of it, of course only if there are
1638 any known constants to begin with. */
1640 if (const_count)
1642 jfunc->agg.by_ref = by_ref;
1643 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
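/* As an example of what the backward walk recovers (hypothetical source,
   not from the original sources):

     struct S { int a, b; };
     void caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       s.a = frob ();   // not an interprocedural invariant
       foo (&s);
     }

   Scanning backwards from the call, s.a = frob () is seen first and records
   the bits of s.a as unknown; s.b = 2 is recorded as a constant; the older
   s.a = 1 then hits the already_there case and is ignored. The resulting
   aggregate jump function therefore describes only s.b. */

/* Return the type of the Ith formal parameter of the callee of edge E, as
   far as it can be determined from the declared argument types or, failing
   that, from DECL_ARGUMENTS; return NULL if it cannot be determined. */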
1647 static tree
1648 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1650 int n;
1651 tree type = (e->callee
1652 ? TREE_TYPE (e->callee->decl)
1653 : gimple_call_fntype (e->call_stmt));
1654 tree t = TYPE_ARG_TYPES (type);
1656 for (n = 0; n < i; n++)
1658 if (!t)
1659 break;
1660 t = TREE_CHAIN (t);
1662 if (t)
1663 return TREE_VALUE (t);
1664 if (!e->callee)
1665 return NULL;
1666 t = DECL_ARGUMENTS (e->callee->decl);
1667 for (n = 0; n < i; n++)
1669 if (!t)
1670 return NULL;
1671 t = TREE_CHAIN (t);
1673 if (t)
1674 return TREE_TYPE (t);
1675 return NULL;
1678 /* Compute jump function for all arguments of callsite CS and insert the
1679 information in the jump_functions array in the ipa_edge_args corresponding
1680 to this callsite. */
1682 static void
1683 ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
1684 struct cgraph_edge *cs)
1686 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1687 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1688 gcall *call = cs->call_stmt;
1689 int n, arg_num = gimple_call_num_args (call);
1690 bool useful_context = false;
1692 if (arg_num == 0 || args->jump_functions)
1693 return;
1694 vec_safe_grow_cleared (args->jump_functions, arg_num);
1695 if (flag_devirtualize)
1696 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1698 if (gimple_call_internal_p (call))
1699 return;
1700 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1701 return;
1703 for (n = 0; n < arg_num; n++)
1705 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1706 tree arg = gimple_call_arg (call, n);
1707 tree param_type = ipa_get_callee_param_type (cs, n);
1708 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1710 tree instance;
1711 struct ipa_polymorphic_call_context context (cs->caller->decl,
1712 arg, cs->call_stmt,
1713 &instance);
1714 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1715 *ipa_get_ith_polymorhic_call_context (args, n) = context;
1716 if (!context.useless_p ())
1717 useful_context = true;
1720 if (is_gimple_ip_invariant (arg))
1721 ipa_set_jf_constant (jfunc, arg, cs);
1722 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1723 && TREE_CODE (arg) == PARM_DECL)
1725 int index = ipa_get_param_decl_index (info, arg);
1727 gcc_assert (index >= 0);
1728 /* Aggregate passed by value, check for pass-through, otherwise we
1729 will attempt to fill in aggregate contents later in this
1730 loop. */
1731 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1733 ipa_set_jf_simple_pass_through (jfunc, index, false);
1734 continue;
1737 else if (TREE_CODE (arg) == SSA_NAME)
1739 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1741 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1742 if (index >= 0)
1744 bool agg_p;
1745 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1746 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1749 else
1751 gimple stmt = SSA_NAME_DEF_STMT (arg);
1752 if (is_gimple_assign (stmt))
1753 compute_complex_assign_jump_func (fbi, info, jfunc,
1754 call, stmt, arg, param_type);
1755 else if (gimple_code (stmt) == GIMPLE_PHI)
1756 compute_complex_ancestor_jump_func (fbi, info, jfunc,
1757 call,
1758 as_a <gphi *> (stmt));
1762 /* If ARG is a pointer, we cannot use its type to determine the type of aggregate
1763 passed (because type conversions are ignored in gimple). Usually we can
1764 safely get the type from the function declaration, but in case of K&R prototypes or
1765 variadic functions we can try our luck with the type of the pointer passed.
1766 TODO: Since we look for actual initialization of the memory object, we may be
1767 better off working out the type based on the memory stores we find. */
1768 if (!param_type)
1769 param_type = TREE_TYPE (arg);
1771 if ((jfunc->type != IPA_JF_PASS_THROUGH
1772 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1773 && (jfunc->type != IPA_JF_ANCESTOR
1774 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1775 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1776 || POINTER_TYPE_P (param_type)))
1777 determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1779 if (!useful_context)
1780 vec_free (args->polymorphic_call_contexts);
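/* Putting the pieces together (an illustrative, hypothetical call):

     void caller (int x, struct S *s)
     {
       helper (x, x + 4, &s->inner);
     }

   would typically yield a simple pass-through for the first argument, an
   arithmetic pass-through (PLUS_EXPR with operand 4) for the second, and an
   ancestor jump function (formal 1 plus the bit offset of the inner member)
   for the third. */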
1783 /* Compute jump functions for all edges - both direct and indirect - outgoing
1784 from BB. */
1786 static void
1787 ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
1789 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1790 int i;
1791 struct cgraph_edge *cs;
1793 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1795 struct cgraph_node *callee = cs->callee;
1797 if (callee)
1799 callee->ultimate_alias_target ();
1800 /* We do not need to bother analyzing calls to unknown functions
1801 unless they may become known during lto/whopr. */
1802 if (!callee->definition && !flag_lto)
1803 continue;
1805 ipa_compute_jump_functions_for_edge (fbi, cs);
1809 /* If STMT looks like a statement loading a value from a member pointer formal
1810 parameter, return that parameter and store the offset of the field to
1811 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1812 might be clobbered). If USE_DELTA, then we look for a use of the delta
1813 field rather than the pfn. */
1815 static tree
1816 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1817 HOST_WIDE_INT *offset_p)
1819 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1821 if (!gimple_assign_single_p (stmt))
1822 return NULL_TREE;
1824 rhs = gimple_assign_rhs1 (stmt);
1825 if (TREE_CODE (rhs) == COMPONENT_REF)
1827 ref_field = TREE_OPERAND (rhs, 1);
1828 rhs = TREE_OPERAND (rhs, 0);
1830 else
1831 ref_field = NULL_TREE;
1832 if (TREE_CODE (rhs) != MEM_REF)
1833 return NULL_TREE;
1834 rec = TREE_OPERAND (rhs, 0);
1835 if (TREE_CODE (rec) != ADDR_EXPR)
1836 return NULL_TREE;
1837 rec = TREE_OPERAND (rec, 0);
1838 if (TREE_CODE (rec) != PARM_DECL
1839 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1840 return NULL_TREE;
1841 ref_offset = TREE_OPERAND (rhs, 1);
1843 if (use_delta)
1844 fld = delta_field;
1845 else
1846 fld = ptr_field;
1847 if (offset_p)
1848 *offset_p = int_bit_position (fld);
1850 if (ref_field)
1852 if (integer_nonzerop (ref_offset))
1853 return NULL_TREE;
1854 return ref_field == fld ? rec : NULL_TREE;
1856 else
1857 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1858 : NULL_TREE;
1861 /* Returns true iff T is an SSA_NAME defined by a statement. */
1863 static bool
1864 ipa_is_ssa_with_stmt_def (tree t)
1866 if (TREE_CODE (t) == SSA_NAME
1867 && !SSA_NAME_IS_DEFAULT_DEF (t))
1868 return true;
1869 else
1870 return false;
1873 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1874 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1875 indirect call graph edge. */
1877 static struct cgraph_edge *
1878 ipa_note_param_call (struct cgraph_node *node, int param_index,
1879 gcall *stmt)
1881 struct cgraph_edge *cs;
1883 cs = node->get_edge (stmt);
1884 cs->indirect_info->param_index = param_index;
1885 cs->indirect_info->agg_contents = 0;
1886 cs->indirect_info->member_ptr = 0;
1887 return cs;
1890 /* Analyze the CALL and examine uses of formal parameters of the caller
1891 FBI->node (described by FBI->info), which also holds intermediate
1892 information about each formal parameter. Currently it checks
1893 whether the call calls a pointer that is a formal parameter and if so, the
1894 parameter is marked with the called flag and an indirect call graph edge
1895 describing the call is created. This is very simple for ordinary pointers
1896 represented in SSA but not-so-nice when it comes to member pointers. The
1897 ugly part of this function does nothing more than try to match the
1898 pattern of such a call. An example of such a pattern is the gimple dump
1899 below, the call is on the last line:
1901 <bb 2>:
1902 f$__delta_5 = f.__delta;
1903 f$__pfn_24 = f.__pfn;
1905 or
1906 <bb 2>:
1907 f$__delta_5 = MEM[(struct *)&f];
1908 f$__pfn_24 = MEM[(struct *)&f + 4B];
1910 and a few lines below:
1912 <bb 5>
1913 D.2496_3 = (int) f$__pfn_24;
1914 D.2497_4 = D.2496_3 & 1;
1915 if (D.2497_4 != 0)
1916 goto <bb 3>;
1917 else
1918 goto <bb 4>;
1920 <bb 6>:
1921 D.2500_7 = (unsigned int) f$__delta_5;
1922 D.2501_8 = &S + D.2500_7;
1923 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1924 D.2503_10 = *D.2502_9;
1925 D.2504_12 = f$__pfn_24 + -1;
1926 D.2505_13 = (unsigned int) D.2504_12;
1927 D.2506_14 = D.2503_10 + D.2505_13;
1928 D.2507_15 = *D.2506_14;
1929 iftmp.11_16 = (String:: *) D.2507_15;
1931 <bb 7>:
1932 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1933 D.2500_19 = (unsigned int) f$__delta_5;
1934 D.2508_20 = &S + D.2500_19;
1935 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1937 Such patterns result from simple calls through a member pointer:
1939 int doprinting (int (MyString::* f)(int) const)
1941 MyString S ("somestring");
1943 return (S.*f)(4);
1946 Moreover, the function looks for called pointers loaded from aggregates
1947 passed by value or reference. */
1949 static void
1950 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1951 tree target)
1953 struct ipa_node_params *info = fbi->info;
1954 HOST_WIDE_INT offset;
1955 bool by_ref;
1957 if (SSA_NAME_IS_DEFAULT_DEF (target))
1959 tree var = SSA_NAME_VAR (target);
1960 int index = ipa_get_param_decl_index (info, var);
1961 if (index >= 0)
1962 ipa_note_param_call (fbi->node, index, call);
1963 return;
1966 int index;
1967 gimple def = SSA_NAME_DEF_STMT (target);
1968 if (gimple_assign_single_p (def)
1969 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
1970 gimple_assign_rhs1 (def), &index, &offset,
1971 NULL, &by_ref))
1973 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1974 cs->indirect_info->offset = offset;
1975 cs->indirect_info->agg_contents = 1;
1976 cs->indirect_info->by_ref = by_ref;
1977 return;
1980 /* Now we need to try to match the complex pattern of calling a member
1981 pointer. */
1982 if (gimple_code (def) != GIMPLE_PHI
1983 || gimple_phi_num_args (def) != 2
1984 || !POINTER_TYPE_P (TREE_TYPE (target))
1985 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1986 return;
1988 /* First, we need to check whether one of these is a load from a member
1989 pointer that is a parameter to this function. */
1990 tree n1 = PHI_ARG_DEF (def, 0);
1991 tree n2 = PHI_ARG_DEF (def, 1);
1992 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1993 return;
1994 gimple d1 = SSA_NAME_DEF_STMT (n1);
1995 gimple d2 = SSA_NAME_DEF_STMT (n2);
1997 tree rec;
1998 basic_block bb, virt_bb;
1999 basic_block join = gimple_bb (def);
2000 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2002 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2003 return;
2005 bb = EDGE_PRED (join, 0)->src;
2006 virt_bb = gimple_bb (d2);
2008 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2010 bb = EDGE_PRED (join, 1)->src;
2011 virt_bb = gimple_bb (d1);
2013 else
2014 return;
2016 /* Second, we need to check that the basic blocks are laid out in the way
2017 corresponding to the pattern. */
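/* The expected shape of the region is roughly (a sketch):

         bb  (tests pfn & 1)
        /  \
       |    virt_bb  (loads the method pointer from the vtable)
        \  /
        join  (PHI merging the two pfn values)
*/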
2019 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2020 || single_pred (virt_bb) != bb
2021 || single_succ (virt_bb) != join)
2022 return;
2024 /* Third, let's see that the branching is done depending on the least
2025 significant bit of the pfn. */
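/* Under the usual ptrmemfunc ABI an odd __pfn value marks a virtual
   member function, so this typically corresponds to a source-level test
   like (f.__pfn & 1); note that the vbit placement is target-dependent,
   which is why TARGET_PTRMEMFUNC_VBIT_LOCATION is consulted below. */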
2027 gimple branch = last_stmt (bb);
2028 if (!branch || gimple_code (branch) != GIMPLE_COND)
2029 return;
2031 if ((gimple_cond_code (branch) != NE_EXPR
2032 && gimple_cond_code (branch) != EQ_EXPR)
2033 || !integer_zerop (gimple_cond_rhs (branch)))
2034 return;
2036 tree cond = gimple_cond_lhs (branch);
2037 if (!ipa_is_ssa_with_stmt_def (cond))
2038 return;
2040 def = SSA_NAME_DEF_STMT (cond);
2041 if (!is_gimple_assign (def)
2042 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2043 || !integer_onep (gimple_assign_rhs2 (def)))
2044 return;
2046 cond = gimple_assign_rhs1 (def);
2047 if (!ipa_is_ssa_with_stmt_def (cond))
2048 return;
2050 def = SSA_NAME_DEF_STMT (cond);
2052 if (is_gimple_assign (def)
2053 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2055 cond = gimple_assign_rhs1 (def);
2056 if (!ipa_is_ssa_with_stmt_def (cond))
2057 return;
2058 def = SSA_NAME_DEF_STMT (cond);
2061 tree rec2;
2062 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2063 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2064 == ptrmemfunc_vbit_in_delta),
2065 NULL);
2066 if (rec != rec2)
2067 return;
2069 index = ipa_get_param_decl_index (info, rec);
2070 if (index >= 0
2071 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2073 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2074 cs->indirect_info->offset = offset;
2075 cs->indirect_info->agg_contents = 1;
2076 cs->indirect_info->member_ptr = 1;
2079 return;
2082 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2083 object referenced in the expression is a formal parameter of the caller
2084 FBI->node (described by FBI->info), create a call note for the
2085 statement. */
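/* A minimal example (hypothetical):

     void f (A *a) { a->foo (); }

   where foo is virtual: the OBJ_TYPE_REF object is the default
   definition of parameter A, so a polymorphic call note for parameter 0
   is created, unless a type change is detected between function entry
   and the call. */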
2087 static void
2088 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2089 gcall *call, tree target)
2091 tree obj = OBJ_TYPE_REF_OBJECT (target);
2092 int index;
2093 HOST_WIDE_INT anc_offset;
2095 if (!flag_devirtualize)
2096 return;
2098 if (TREE_CODE (obj) != SSA_NAME)
2099 return;
2101 struct ipa_node_params *info = fbi->info;
2102 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2104 struct ipa_jump_func jfunc;
2105 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2106 return;
2108 anc_offset = 0;
2109 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2110 gcc_assert (index >= 0);
2111 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2112 call, &jfunc))
2113 return;
2115 else
2117 struct ipa_jump_func jfunc;
2118 gimple stmt = SSA_NAME_DEF_STMT (obj);
2119 tree expr;
2121 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2122 if (!expr)
2123 return;
2124 index = ipa_get_param_decl_index (info,
2125 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2126 gcc_assert (index >= 0);
2127 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2128 call, &jfunc, anc_offset))
2129 return;
2132 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2133 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2134 ii->offset = anc_offset;
2135 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2136 ii->otr_type = obj_type_ref_class (target);
2137 ii->polymorphic = 1;
2140 /* Analyze the call statement CALL to determine whether and how it utilizes
2141 formal parameters of the caller, described by FBI->info. FBI also holds
2142 intermediate information about each formal parameter. */
2144 static void
2145 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2147 tree target = gimple_call_fn (call);
2149 if (!target
2150 || (TREE_CODE (target) != SSA_NAME
2151 && !virtual_method_call_p (target)))
2152 return;
2154 struct cgraph_edge *cs = fbi->node->get_edge (call);
2155 /* If we previously turned the call into a direct call, there is
2156 no need to analyze. */
2157 if (cs && !cs->indirect_unknown_callee)
2158 return;
2160 if (cs->indirect_info->polymorphic)
2162 tree instance;
2163 tree target = gimple_call_fn (call);
2164 ipa_polymorphic_call_context context (current_function_decl,
2165 target, call, &instance);
2167 gcc_checking_assert (cs->indirect_info->otr_type
2168 == obj_type_ref_class (target));
2169 gcc_checking_assert (cs->indirect_info->otr_token
2170 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2172 cs->indirect_info->vptr_changed
2173 = !context.get_dynamic_type (instance,
2174 OBJ_TYPE_REF_OBJECT (target),
2175 obj_type_ref_class (target), call);
2176 cs->indirect_info->context = context;
2179 if (TREE_CODE (target) == SSA_NAME)
2180 ipa_analyze_indirect_call_uses (fbi, call, target);
2181 else if (virtual_method_call_p (target))
2182 ipa_analyze_virtual_call_uses (fbi, call, target);
2186 /* Analyze the call statement STMT with respect to formal parameters (described
2187 in FBI->info) of the caller given by FBI->node. Currently it only checks
2188 whether formal parameters are called. */
2190 static void
2191 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2193 if (is_gimple_call (stmt))
2194 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2197 /* Callback of walk_stmt_load_store_addr_ops, used for loads, stores and
2198 address-taking operands alike. If OP is a parameter declaration, mark it
2199 as used in the info structure passed in DATA. */
2201 static bool
2202 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2204 struct ipa_node_params *info = (struct ipa_node_params *) data;
2206 op = get_base_address (op);
2207 if (op
2208 && TREE_CODE (op) == PARM_DECL)
2210 int index = ipa_get_param_decl_index (info, op);
2211 gcc_assert (index >= 0);
2212 ipa_set_param_used (info, index, true);
2215 return false;
2218 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2219 the findings in various structures of the associated ipa_node_params
2220 structure, such as parameter flags, notes etc. FBI holds various data about
2221 the function being analyzed. */
2223 static void
2224 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2226 gimple_stmt_iterator gsi;
2227 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2229 gimple stmt = gsi_stmt (gsi);
2231 if (is_gimple_debug (stmt))
2232 continue;
2234 ipa_analyze_stmt_uses (fbi, stmt);
2235 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2236 visit_ref_for_mod_analysis,
2237 visit_ref_for_mod_analysis,
2238 visit_ref_for_mod_analysis);
2240 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2241 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2242 visit_ref_for_mod_analysis,
2243 visit_ref_for_mod_analysis,
2244 visit_ref_for_mod_analysis);
2247 /* Calculate controlled uses of parameters of NODE. */
2249 static void
2250 ipa_analyze_controlled_uses (struct cgraph_node *node)
2252 struct ipa_node_params *info = IPA_NODE_REF (node);
2254 for (int i = 0; i < ipa_get_param_count (info); i++)
2256 tree parm = ipa_get_param (info, i);
2257 int controlled_uses = 0;
2259 /* For SSA regs see if parameter is used. For non-SSA we compute
2260 the flag during modification analysis. */
2261 if (is_gimple_reg (parm))
2263 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2264 parm);
2265 if (ddef && !has_zero_uses (ddef))
2267 imm_use_iterator imm_iter;
2268 use_operand_p use_p;
2270 ipa_set_param_used (info, i, true);
2271 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2272 if (!is_gimple_call (USE_STMT (use_p)))
2274 if (!is_gimple_debug (USE_STMT (use_p)))
2276 controlled_uses = IPA_UNDESCRIBED_USE;
2277 break;
2280 else
2281 controlled_uses++;
2283 else
2284 controlled_uses = 0;
2286 else
2287 controlled_uses = IPA_UNDESCRIBED_USE;
2288 ipa_set_controlled_uses (info, i, controlled_uses);
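/* As a hypothetical example, a parameter whose default definition has
   three uses, all of them arguments of calls, ends up with three
   controlled uses; a single non-call use (debug statements aside)
   degrades the count to IPA_UNDESCRIBED_USE instead. */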
2292 /* Free stuff in BI. */
2294 static void
2295 free_ipa_bb_info (struct ipa_bb_info *bi)
2297 bi->cg_edges.release ();
2298 bi->param_aa_statuses.release ();
2301 /* Dominator walker driving the analysis. */
2303 class analysis_dom_walker : public dom_walker
2305 public:
2306 analysis_dom_walker (struct func_body_info *fbi)
2307 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2309 virtual void before_dom_children (basic_block);
2311 private:
2312 struct func_body_info *m_fbi;
2315 void
2316 analysis_dom_walker::before_dom_children (basic_block bb)
2318 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2319 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2322 /* Initialize the array describing properties of formal parameters
2323 of NODE, analyze their uses and compute jump functions associated
2324 with actual arguments of calls from within NODE. */
2326 void
2327 ipa_analyze_node (struct cgraph_node *node)
2329 struct func_body_info fbi;
2330 struct ipa_node_params *info;
2332 ipa_check_create_node_params ();
2333 ipa_check_create_edge_args ();
2334 info = IPA_NODE_REF (node);
2336 if (info->analysis_done)
2337 return;
2338 info->analysis_done = 1;
2340 if (ipa_func_spec_opts_forbid_analysis_p (node))
2342 for (int i = 0; i < ipa_get_param_count (info); i++)
2344 ipa_set_param_used (info, i, true);
2345 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2347 return;
2350 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2351 push_cfun (func);
2352 calculate_dominance_info (CDI_DOMINATORS);
2353 ipa_initialize_node_params (node);
2354 ipa_analyze_controlled_uses (node);
2356 fbi.node = node;
2357 fbi.info = IPA_NODE_REF (node);
2358 fbi.bb_infos = vNULL;
2359 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2360 fbi.param_count = ipa_get_param_count (info);
2361 fbi.aa_walked = 0;
2363 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2365 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2366 bi->cg_edges.safe_push (cs);
2369 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2371 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2372 bi->cg_edges.safe_push (cs);
2375 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2377 int i;
2378 struct ipa_bb_info *bi;
2379 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2380 free_ipa_bb_info (bi);
2381 fbi.bb_infos.release ();
2382 free_dominance_info (CDI_DOMINATORS);
2383 pop_cfun ();
2386 /* Update the jump functions associated with call graph edge E when the call
2387 graph edge CS is being inlined, assuming that E->caller is already (possibly
2388 indirectly) inlined into CS->callee and that E has not been inlined. */
2390 static void
2391 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2392 struct cgraph_edge *e)
2394 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2395 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2396 int count = ipa_get_cs_argument_count (args);
2397 int i;
2399 for (i = 0; i < count; i++)
2401 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2402 struct ipa_polymorphic_call_context *dst_ctx
2403 = ipa_get_ith_polymorhic_call_context (args, i);
2405 if (dst->type == IPA_JF_ANCESTOR)
2407 struct ipa_jump_func *src;
2408 int dst_fid = dst->value.ancestor.formal_id;
2409 struct ipa_polymorphic_call_context *src_ctx
2410 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2412 /* A variable number of arguments can cause havoc if we try to access
2413 one that does not exist in the inlined edge. So make sure we
2414 don't. */
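/* E.g. if the call being inlined passed only two arguments but this
   jump function was built in terms of formal #3, there is nothing
   valid to compose with, so the result is simply unknown (a made-up
   scenario). */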
2415 if (dst_fid >= ipa_get_cs_argument_count (top))
2417 dst->type = IPA_JF_UNKNOWN;
2418 continue;
2421 src = ipa_get_ith_jump_func (top, dst_fid);
2423 if (src_ctx && !src_ctx->useless_p ())
2425 struct ipa_polymorphic_call_context ctx = *src_ctx;
2427 /* TODO: Make type preserved safe WRT contexts. */
2428 if (!ipa_get_jf_ancestor_type_preserved (dst))
2429 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2430 ctx.offset_by (dst->value.ancestor.offset);
2431 if (!ctx.useless_p ())
2433 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2434 count);
2435 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2437 dst_ctx->combine_with (ctx);
2440 if (src->agg.items
2441 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2443 struct ipa_agg_jf_item *item;
2444 int j;
2446 /* Currently we do not produce clobber aggregate jump functions,
2447 replace with merging when we do. */
2448 gcc_assert (!dst->agg.items);
2450 dst->agg.items = vec_safe_copy (src->agg.items);
2451 dst->agg.by_ref = src->agg.by_ref;
2452 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2453 item->offset -= dst->value.ancestor.offset;
2456 if (src->type == IPA_JF_PASS_THROUGH
2457 && src->value.pass_through.operation == NOP_EXPR)
2459 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2460 dst->value.ancestor.agg_preserved &=
2461 src->value.pass_through.agg_preserved;
2463 else if (src->type == IPA_JF_ANCESTOR)
2465 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2466 dst->value.ancestor.offset += src->value.ancestor.offset;
2467 dst->value.ancestor.agg_preserved &=
2468 src->value.ancestor.agg_preserved;
2470 else
2471 dst->type = IPA_JF_UNKNOWN;
2473 else if (dst->type == IPA_JF_PASS_THROUGH)
2475 struct ipa_jump_func *src;
2476 /* We must check the range due to calls with a variable number of arguments,
2477 and we cannot combine jump functions with operations. */
2478 if (dst->value.pass_through.operation == NOP_EXPR
2479 && (dst->value.pass_through.formal_id
2480 < ipa_get_cs_argument_count (top)))
2482 int dst_fid = dst->value.pass_through.formal_id;
2483 src = ipa_get_ith_jump_func (top, dst_fid);
2484 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2485 struct ipa_polymorphic_call_context *src_ctx
2486 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2488 if (src_ctx && !src_ctx->useless_p ())
2490 struct ipa_polymorphic_call_context ctx = *src_ctx;
2492 /* TODO: Make type preserved safe WRT contexts. */
2493 if (!ipa_get_jf_pass_through_type_preserved (dst))
2494 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2495 if (!ctx.useless_p ())
2497 if (!dst_ctx)
2499 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2500 count);
2501 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2503 dst_ctx->combine_with (ctx);
2506 switch (src->type)
2508 case IPA_JF_UNKNOWN:
2509 dst->type = IPA_JF_UNKNOWN;
2510 break;
2511 case IPA_JF_CONST:
2512 ipa_set_jf_cst_copy (dst, src);
2513 break;
2515 case IPA_JF_PASS_THROUGH:
2517 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2518 enum tree_code operation;
2519 operation = ipa_get_jf_pass_through_operation (src);
2521 if (operation == NOP_EXPR)
2523 bool agg_p;
2524 agg_p = dst_agg_p
2525 && ipa_get_jf_pass_through_agg_preserved (src);
2526 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2528 else
2530 tree operand = ipa_get_jf_pass_through_operand (src);
2531 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2532 operation);
2534 break;
2536 case IPA_JF_ANCESTOR:
2538 bool agg_p;
2539 agg_p = dst_agg_p
2540 && ipa_get_jf_ancestor_agg_preserved (src);
2541 ipa_set_ancestor_jf (dst,
2542 ipa_get_jf_ancestor_offset (src),
2543 ipa_get_jf_ancestor_formal_id (src),
2544 agg_p);
2545 break;
2547 default:
2548 gcc_unreachable ();
2551 if (src->agg.items
2552 && (dst_agg_p || !src->agg.by_ref))
2554 /* Currently we do not produce clobber aggregate jump
2555 functions, replace with merging when we do. */
2556 gcc_assert (!dst->agg.items);
2558 dst->agg.by_ref = src->agg.by_ref;
2559 dst->agg.items = vec_safe_copy (src->agg.items);
2562 else
2563 dst->type = IPA_JF_UNKNOWN;
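/* To illustrate the ancestor composition above with a made-up example:
   an ancestor jump function with offset 32 composed with an ancestor
   jump function with offset 64 on the outer edge yields an ancestor
   jump function with offset 96; the offsets simply add up, while the
   agg_preserved flags are anded together. */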
2568 /* If TARGET is an addr_expr of a function declaration, make it the
2569 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2570 Otherwise, return NULL. */
2572 struct cgraph_edge *
2573 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2574 bool speculative)
2576 struct cgraph_node *callee;
2577 struct inline_edge_summary *es = inline_edge_summary (ie);
2578 bool unreachable = false;
2580 if (TREE_CODE (target) == ADDR_EXPR)
2581 target = TREE_OPERAND (target, 0);
2582 if (TREE_CODE (target) != FUNCTION_DECL)
2584 target = canonicalize_constructor_val (target, NULL);
2585 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2587 if (ie->indirect_info->member_ptr)
2588 /* Member pointer call that goes through a VMT lookup. */
2589 return NULL;
2591 if (dump_enabled_p ())
2593 location_t loc = gimple_location_safe (ie->call_stmt);
2594 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2595 "discovered direct call to non-function in %s/%i, "
2596 "making it __builtin_unreachable\n",
2597 ie->caller->name (), ie->caller->order);
2600 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2601 callee = cgraph_node::get_create (target);
2602 unreachable = true;
2604 else
2605 callee = cgraph_node::get (target);
2607 else
2608 callee = cgraph_node::get (target);
2610 /* Because may-edges are not explicitly represented and the vtable may be external,
2611 we may create the first reference to the object in the unit. */
2612 if (!callee || callee->global.inlined_to)
2615 /* We had better make sure we can refer to it.
2616 In the case of static functions we are out of luck, since we have already
2617 removed their bodies. In the case of public functions we may or may
2618 not introduce the reference. */
2619 if (!canonicalize_constructor_val (target, NULL)
2620 || !TREE_PUBLIC (target))
2622 if (dump_file)
2623 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2624 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2625 xstrdup (ie->caller->name ()),
2626 ie->caller->order,
2627 xstrdup (ie->callee->name ()),
2628 ie->callee->order);
2629 return NULL;
2631 callee = cgraph_node::get_create (target);
2634 /* If the edge is already speculated. */
2635 if (speculative && ie->speculative)
2637 struct cgraph_edge *e2;
2638 struct ipa_ref *ref;
2639 ie->speculative_call_info (e2, ie, ref);
2640 if (e2->callee->ultimate_alias_target ()
2641 != callee->ultimate_alias_target ())
2643 if (dump_file)
2644 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2645 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2646 xstrdup (ie->caller->name ()),
2647 ie->caller->order,
2648 xstrdup (callee->name ()),
2649 callee->order,
2650 xstrdup (e2->callee->name ()),
2651 e2->callee->order);
2653 else
2655 if (dump_file)
2656 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2657 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2658 xstrdup (ie->caller->name ()),
2659 ie->caller->order,
2660 xstrdup (callee->name ()),
2661 callee->order);
2663 return NULL;
2666 if (!dbg_cnt (devirt))
2667 return NULL;
2669 ipa_check_create_node_params ();
2671 /* We cannot make edges to inline clones. It is a bug if someone removed
2672 the cgraph node too early. */
2673 gcc_assert (!callee->global.inlined_to);
2675 if (dump_file && !unreachable)
2677 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2678 "(%s/%i -> %s/%i), for stmt ",
2679 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2680 speculative ? "speculative" : "known",
2681 xstrdup (ie->caller->name ()),
2682 ie->caller->order,
2683 xstrdup (callee->name ()),
2684 callee->order);
2685 if (ie->call_stmt)
2686 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2687 else
2688 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2690 if (dump_enabled_p ())
2692 location_t loc = gimple_location_safe (ie->call_stmt);
2694 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2695 "converting indirect call in %s to direct call to %s\n",
2696 ie->caller->name (), callee->name ());
2698 if (!speculative)
2699 ie = ie->make_direct (callee);
2700 else
2702 if (!callee->can_be_discarded_p ())
2704 cgraph_node *alias;
2705 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2706 if (alias)
2707 callee = alias;
2709 ie = ie->make_speculative
2710 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2712 es = inline_edge_summary (ie);
2713 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2714 - eni_size_weights.call_cost);
2715 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2716 - eni_time_weights.call_cost);
2718 return ie;
2721 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2722 return NULL if there is none. BY_REF specifies whether the value has to
2723 be passed by reference or by value. */
2725 tree
2726 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2727 HOST_WIDE_INT offset, bool by_ref)
2729 struct ipa_agg_jf_item *item;
2730 int i;
2732 if (by_ref != agg->by_ref)
2733 return NULL;
2735 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2736 if (item->offset == offset)
2738 /* Currently we do not have clobber values, return NULL for them once
2739 we do. */
2740 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2741 return item->value;
2743 return NULL;
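/* A minimal usage sketch (with a hypothetical bit offset):

     tree cst = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);

   asks whether the aggregate passed by reference through this jump
   function is known to contain a constant 64 bits in, yielding the
   constant or NULL. */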
2746 /* Remove a reference to SYMBOL from the list of references of a node given by
2747 reference description RDESC. Return true if the reference has been
2748 successfully found and removed. */
2750 static bool
2751 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2753 struct ipa_ref *to_del;
2754 struct cgraph_edge *origin;
2756 origin = rdesc->cs;
2757 if (!origin)
2758 return false;
2759 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2760 origin->lto_stmt_uid);
2761 if (!to_del)
2762 return false;
2764 to_del->remove_reference ();
2765 if (dump_file)
2766 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2767 xstrdup (origin->caller->name ()),
2768 origin->caller->order, xstrdup (symbol->name ()));
2769 return true;
2772 /* If JFUNC has a reference description with refcount different from
2773 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2774 NULL. JFUNC must be a constant jump function. */
2776 static struct ipa_cst_ref_desc *
2777 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2779 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2780 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2781 return rdesc;
2782 else
2783 return NULL;
2786 /* If the value of constant jump function JFUNC is an address of a function
2787 declaration, return the associated call graph node. Otherwise return
2788 NULL. */
2790 static cgraph_node *
2791 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2793 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2794 tree cst = ipa_get_jf_constant (jfunc);
2795 if (TREE_CODE (cst) != ADDR_EXPR
2796 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2797 return NULL;
2799 return cgraph_node::get (TREE_OPERAND (cst, 0));
2803 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2804 refcount and if it hits zero, remove the reference to the symbol described
2805 by the constant from the caller of the edge specified in the rdesc. Return
2806 false if either the symbol or the reference could not be found, otherwise
2807 return true. */
2808 static bool
2809 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2811 struct ipa_cst_ref_desc *rdesc;
2812 if (jfunc->type == IPA_JF_CONST
2813 && (rdesc = jfunc_rdesc_usable (jfunc))
2814 && --rdesc->refcount == 0)
2816 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2817 if (!symbol)
2818 return false;
2820 return remove_described_reference (symbol, rdesc);
2822 return true;
2825 /* Try to find a destination for indirect edge IE that corresponds to a simple
2826 call or a call of a member function pointer and where the destination is a
2827 pointer formal parameter described by jump function JFUNC. If it can be
2828 determined, return the newly direct edge, otherwise return NULL.
2829 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2831 static struct cgraph_edge *
2832 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2833 struct ipa_jump_func *jfunc,
2834 struct ipa_node_params *new_root_info)
2836 struct cgraph_edge *cs;
2837 tree target;
2838 bool agg_contents = ie->indirect_info->agg_contents;
2840 if (ie->indirect_info->agg_contents)
2841 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2842 ie->indirect_info->offset,
2843 ie->indirect_info->by_ref);
2844 else
2845 target = ipa_value_from_jfunc (new_root_info, jfunc);
2846 if (!target)
2847 return NULL;
2848 cs = ipa_make_edge_direct_to_target (ie, target);
2850 if (cs && !agg_contents)
2852 bool ok;
2853 gcc_checking_assert (cs->callee
2854 && (cs != ie
2855 || jfunc->type != IPA_JF_CONST
2856 || !cgraph_node_for_jfunc (jfunc)
2857 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2858 ok = try_decrement_rdesc_refcount (jfunc);
2859 gcc_checking_assert (ok);
2862 return cs;
2865 /* Return the target to be used in cases of impossible devirtualization. IE
2866 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2868 tree
2869 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2871 if (dump_file)
2873 if (target)
2874 fprintf (dump_file,
2875 "Type inconsistent devirtualization: %s/%i->%s\n",
2876 ie->caller->name (), ie->caller->order,
2877 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2878 else
2879 fprintf (dump_file,
2880 "No devirtualization target in %s/%i\n",
2881 ie->caller->name (), ie->caller->order);
2883 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2884 cgraph_node::get_create (new_target);
2885 return new_target;
2888 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2889 call based on a formal parameter which is described by jump function JFUNC
2890 and if it can be determined, make it direct and return the direct edge.
2891 Otherwise, return NULL. CTX describes the polymorphic context that the
2892 parameter the call is based on brings along with it. */
2894 static struct cgraph_edge *
2895 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2896 struct ipa_jump_func *jfunc,
2897 struct ipa_polymorphic_call_context ctx)
2899 tree target = NULL;
2900 bool speculative = false;
2902 if (!flag_devirtualize)
2903 return NULL;
2905 gcc_assert (!ie->indirect_info->by_ref);
2907 /* Try to do lookup via known virtual table pointer value. */
2908 if (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively)
2910 tree vtable;
2911 unsigned HOST_WIDE_INT offset;
2912 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2913 ie->indirect_info->offset,
2914 true);
2915 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2917 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2918 vtable, offset);
2919 if (t)
2921 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2922 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2923 || !possible_polymorphic_call_target_p
2924 (ie, cgraph_node::get (t)))
2926 /* Do not speculate builtin_unreachable, it is stupid! */
2927 if (!ie->indirect_info->vptr_changed)
2928 target = ipa_impossible_devirt_target (ie, target);
2930 else
2932 target = t;
2933 speculative = ie->indirect_info->vptr_changed;
2939 ipa_polymorphic_call_context ie_context (ie);
2940 vec <cgraph_node *>targets;
2941 bool final;
2943 ctx.offset_by (ie->indirect_info->offset);
2944 if (ie->indirect_info->vptr_changed)
2945 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2946 ie->indirect_info->otr_type);
2947 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2948 targets = possible_polymorphic_call_targets
2949 (ie->indirect_info->otr_type,
2950 ie->indirect_info->otr_token,
2951 ctx, &final);
2952 if (final && targets.length () <= 1)
2954 if (targets.length () == 1)
2955 target = targets[0]->decl;
2956 else
2957 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2959 else if (!target && flag_devirtualize_speculatively
2960 && !ie->speculative && ie->maybe_hot_p ())
2962 cgraph_node *n;
2963 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2964 ie->indirect_info->otr_token,
2965 ie->indirect_info->context);
2966 if (n)
2968 target = n->decl;
2969 speculative = true;
2973 if (target)
2975 if (!possible_polymorphic_call_target_p
2976 (ie, cgraph_node::get_create (target)))
2978 if (speculative)
2979 return NULL;
2980 target = ipa_impossible_devirt_target (ie, target);
2982 return ipa_make_edge_direct_to_target (ie, target, speculative);
2984 else
2985 return NULL;
2988 /* Update the parameter call notes associated with NODE when CS is being inlined,
2989 assuming NODE is (potentially indirectly) inlined into CS->callee.
2990 Moreover, if the callee is discovered to be constant, create a new cgraph
2991 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2992 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2994 static bool
2995 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2996 struct cgraph_node *node,
2997 vec<cgraph_edge *> *new_edges)
2999 struct ipa_edge_args *top;
3000 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3001 struct ipa_node_params *new_root_info;
3002 bool res = false;
3004 ipa_check_create_edge_args ();
3005 top = IPA_EDGE_REF (cs);
3006 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3007 ? cs->caller->global.inlined_to
3008 : cs->caller);
3010 for (ie = node->indirect_calls; ie; ie = next_ie)
3012 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3013 struct ipa_jump_func *jfunc;
3014 int param_index;
3016 next_ie = ie->next_callee;
3018 if (ici->param_index == -1)
3019 continue;
3021 /* We must check range due to calls with variable number of arguments: */
3022 if (ici->param_index >= ipa_get_cs_argument_count (top))
3024 ici->param_index = -1;
3025 continue;
3028 param_index = ici->param_index;
3029 jfunc = ipa_get_ith_jump_func (top, param_index);
3031 if (!flag_indirect_inlining)
3032 new_direct_edge = NULL;
3033 else if (ici->polymorphic)
3035 ipa_polymorphic_call_context ctx;
3036 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3037 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3039 else
3040 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3041 new_root_info);
3042 /* If speculation was removed, then we need to do nothing. */
3043 if (new_direct_edge && new_direct_edge != ie)
3045 new_direct_edge->indirect_inlining_edge = 1;
3046 top = IPA_EDGE_REF (cs);
3047 res = true;
3049 else if (new_direct_edge)
3051 new_direct_edge->indirect_inlining_edge = 1;
3052 if (new_direct_edge->call_stmt)
3053 new_direct_edge->call_stmt_cannot_inline_p
3054 = !gimple_check_call_matching_types (
3055 new_direct_edge->call_stmt,
3056 new_direct_edge->callee->decl, false);
3057 if (new_edges)
3059 new_edges->safe_push (new_direct_edge);
3060 res = true;
3062 top = IPA_EDGE_REF (cs);
3064 else if (jfunc->type == IPA_JF_PASS_THROUGH
3065 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3067 if ((ici->agg_contents
3068 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3069 || (ici->polymorphic
3070 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3071 ici->param_index = -1;
3072 else
3073 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3075 else if (jfunc->type == IPA_JF_ANCESTOR)
3077 if ((ici->agg_contents
3078 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3079 || (ici->polymorphic
3080 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3081 ici->param_index = -1;
3082 else
3084 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3085 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3088 else
3089 /* Either we can find a destination for this edge now or never. */
3090 ici->param_index = -1;
3093 return res;
3096 /* Recursively traverse subtree of NODE (including node) made of inlined
3097 cgraph_edges when CS has been inlined and invoke
3098 update_indirect_edges_after_inlining on all nodes and
3099 update_jump_functions_after_inlining on all non-inlined edges that lead out
3100 of this subtree. Newly discovered indirect edges will be added to
3101 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3102 created. */
3104 static bool
3105 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3106 struct cgraph_node *node,
3107 vec<cgraph_edge *> *new_edges)
3109 struct cgraph_edge *e;
3110 bool res;
3112 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3114 for (e = node->callees; e; e = e->next_callee)
3115 if (!e->inline_failed)
3116 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3117 else
3118 update_jump_functions_after_inlining (cs, e);
3119 for (e = node->indirect_calls; e; e = e->next_callee)
3120 update_jump_functions_after_inlining (cs, e);
3122 return res;
3125 /* Combine two controlled uses counts as done during inlining. */
3127 static int
3128 combine_controlled_uses_counters (int c, int d)
3130 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3131 return IPA_UNDESCRIBED_USE;
3132 else
3133 return c + d - 1;
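/* Worked example: if the caller had C == 2 described uses of a constant
   (one of them being the call site that is now inlined) and the callee
   had D == 3 described uses of the corresponding parameter, the combined
   count is 2 + 3 - 1 == 4, since the inlined call site itself no longer
   counts as a use. */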
3136 /* Propagate number of controlled users from CS->callee to the new root of the
3137 tree of inlined nodes. */
3139 static void
3140 propagate_controlled_uses (struct cgraph_edge *cs)
3142 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3143 struct cgraph_node *new_root = cs->caller->global.inlined_to
3144 ? cs->caller->global.inlined_to : cs->caller;
3145 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3146 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3147 int count, i;
3149 count = MIN (ipa_get_cs_argument_count (args),
3150 ipa_get_param_count (old_root_info));
3151 for (i = 0; i < count; i++)
3153 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3154 struct ipa_cst_ref_desc *rdesc;
3156 if (jf->type == IPA_JF_PASS_THROUGH)
3158 int src_idx, c, d;
3159 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3160 c = ipa_get_controlled_uses (new_root_info, src_idx);
3161 d = ipa_get_controlled_uses (old_root_info, i);
3163 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3164 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3165 c = combine_controlled_uses_counters (c, d);
3166 ipa_set_controlled_uses (new_root_info, src_idx, c);
3167 if (c == 0 && new_root_info->ipcp_orig_node)
3169 struct cgraph_node *n;
3170 struct ipa_ref *ref;
3171 tree t = new_root_info->known_csts[src_idx];
3173 if (t && TREE_CODE (t) == ADDR_EXPR
3174 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3175 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3176 && (ref = new_root->find_reference (n, NULL, 0)))
3178 if (dump_file)
3179 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3180 "reference from %s/%i to %s/%i.\n",
3181 xstrdup (new_root->name ()),
3182 new_root->order,
3183 xstrdup (n->name ()), n->order);
3184 ref->remove_reference ();
3188 else if (jf->type == IPA_JF_CONST
3189 && (rdesc = jfunc_rdesc_usable (jf)))
3191 int d = ipa_get_controlled_uses (old_root_info, i);
3192 int c = rdesc->refcount;
3193 rdesc->refcount = combine_controlled_uses_counters (c, d);
3194 if (rdesc->refcount == 0)
3196 tree cst = ipa_get_jf_constant (jf);
3197 struct cgraph_node *n;
3198 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3199 && TREE_CODE (TREE_OPERAND (cst, 0))
3200 == FUNCTION_DECL);
3201 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3202 if (n)
3204 struct cgraph_node *clone;
3205 bool ok;
3206 ok = remove_described_reference (n, rdesc);
3207 gcc_checking_assert (ok);
3209 clone = cs->caller;
3210 while (clone->global.inlined_to
3211 && clone != rdesc->cs->caller
3212 && IPA_NODE_REF (clone)->ipcp_orig_node)
3214 struct ipa_ref *ref;
3215 ref = clone->find_reference (n, NULL, 0);
3216 if (ref)
3218 if (dump_file)
3219 fprintf (dump_file, "ipa-prop: Removing "
3220 "cloning-created reference "
3221 "from %s/%i to %s/%i.\n",
3222 xstrdup (clone->name ()),
3223 clone->order,
3224 xstrdup (n->name ()),
3225 n->order);
3226 ref->remove_reference ();
3228 clone = clone->callers->caller;
3235 for (i = ipa_get_param_count (old_root_info);
3236 i < ipa_get_cs_argument_count (args);
3237 i++)
3239 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3241 if (jf->type == IPA_JF_CONST)
3243 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3244 if (rdesc)
3245 rdesc->refcount = IPA_UNDESCRIBED_USE;
3247 else if (jf->type == IPA_JF_PASS_THROUGH)
3248 ipa_set_controlled_uses (new_root_info,
3249 jf->value.pass_through.formal_id,
3250 IPA_UNDESCRIBED_USE);
3254 /* Update jump functions and call note functions on inlining the call site CS.
3255 CS is expected to lead to a node already cloned by
3256 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3257 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3258 created. */
3260 bool
3261 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3262 vec<cgraph_edge *> *new_edges)
3264 bool changed;
3265 /* Do nothing if the preparation phase has not been carried out yet
3266 (i.e. during early inlining). */
3267 if (!ipa_node_params_vector.exists ())
3268 return false;
3269 gcc_assert (ipa_edge_args_vector);
3271 propagate_controlled_uses (cs);
3272 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3274 return changed;
3277 /* Frees all dynamically allocated structures that the argument info points
3278 to. */
3280 void
3281 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3283 vec_free (args->jump_functions);
3284 memset (args, 0, sizeof (*args));
3287 /* Free all ipa_edge structures. */
3289 void
3290 ipa_free_all_edge_args (void)
3292 int i;
3293 struct ipa_edge_args *args;
3295 if (!ipa_edge_args_vector)
3296 return;
3298 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3299 ipa_free_edge_args_substructures (args);
3301 vec_free (ipa_edge_args_vector);
3304 /* Frees all dynamically allocated structures that the param info points
3305 to. */
3307 void
3308 ipa_free_node_params_substructures (struct ipa_node_params *info)
3310 info->descriptors.release ();
3311 free (info->lattices);
3312 /* Lattice values and their sources are deallocated with their allocation
3313 pool. */
3314 info->known_csts.release ();
3315 info->known_contexts.release ();
3316 memset (info, 0, sizeof (*info));
3319 /* Free all ipa_node_params structures. */
3321 void
3322 ipa_free_all_node_params (void)
3324 int i;
3325 struct ipa_node_params *info;
3327 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3328 ipa_free_node_params_substructures (info);
3330 ipa_node_params_vector.release ();
3333 /* Set the aggregate replacements of NODE to be AGGVALS. */
3335 void
3336 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3337 struct ipa_agg_replacement_value *aggvals)
3339 if (vec_safe_length (ipa_node_agg_replacements)
3340 <= (unsigned) symtab->cgraph_max_uid)
3341 vec_safe_grow_cleared (ipa_node_agg_replacements,
3342 symtab->cgraph_max_uid + 1);
3344 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3347 /* Hook that is called by cgraph.c when an edge is removed. */
3349 static void
3350 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3352 struct ipa_edge_args *args;
3354 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3355 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3356 return;
3358 args = IPA_EDGE_REF (cs);
3359 if (args->jump_functions)
3361 struct ipa_jump_func *jf;
3362 int i;
3363 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3365 struct ipa_cst_ref_desc *rdesc;
3366 try_decrement_rdesc_refcount (jf);
3367 if (jf->type == IPA_JF_CONST
3368 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3369 && rdesc->cs == cs)
3370 rdesc->cs = NULL;
3374 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3377 /* Hook that is called by cgraph.c when a node is removed. */
3379 static void
3380 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3382 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3383 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3384 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3385 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3386 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3389 /* Hook that is called by cgraph.c when an edge is duplicated. */
3391 static void
3392 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3393 __attribute__((unused)) void *data)
3395 struct ipa_edge_args *old_args, *new_args;
3396 unsigned int i;
3398 ipa_check_create_edge_args ();
3400 old_args = IPA_EDGE_REF (src);
3401 new_args = IPA_EDGE_REF (dst);
3403 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3404 if (old_args->polymorphic_call_contexts)
3405 new_args->polymorphic_call_contexts
3406 = vec_safe_copy (old_args->polymorphic_call_contexts);
3408 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3410 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3411 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3413 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3415 if (src_jf->type == IPA_JF_CONST)
3417 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3419 if (!src_rdesc)
3420 dst_jf->value.constant.rdesc = NULL;
3421 else if (src->caller == dst->caller)
3423 struct ipa_ref *ref;
3424 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3425 gcc_checking_assert (n);
3426 ref = src->caller->find_reference (n, src->call_stmt,
3427 src->lto_stmt_uid);
3428 gcc_checking_assert (ref);
3429 dst->caller->clone_reference (ref, ref->stmt);
3431 gcc_checking_assert (ipa_refdesc_pool);
3432 struct ipa_cst_ref_desc *dst_rdesc
3433 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3434 dst_rdesc->cs = dst;
3435 dst_rdesc->refcount = src_rdesc->refcount;
3436 dst_rdesc->next_duplicate = NULL;
3437 dst_jf->value.constant.rdesc = dst_rdesc;
3439 else if (src_rdesc->cs == src)
3441 struct ipa_cst_ref_desc *dst_rdesc;
3442 gcc_checking_assert (ipa_refdesc_pool);
3443 dst_rdesc
3444 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3445 dst_rdesc->cs = dst;
3446 dst_rdesc->refcount = src_rdesc->refcount;
3447 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3448 src_rdesc->next_duplicate = dst_rdesc;
3449 dst_jf->value.constant.rdesc = dst_rdesc;
3451 else
3453 struct ipa_cst_ref_desc *dst_rdesc;
3454 /* This can happen during inlining, when a JFUNC can refer to a
3455 reference taken in a function up in the tree of inline clones.
3456 We need to find the duplicate that refers to our tree of
3457 inline clones. */
3459 gcc_assert (dst->caller->global.inlined_to);
3460 for (dst_rdesc = src_rdesc->next_duplicate;
3461 dst_rdesc;
3462 dst_rdesc = dst_rdesc->next_duplicate)
3464 struct cgraph_node *top;
3465 top = dst_rdesc->cs->caller->global.inlined_to
3466 ? dst_rdesc->cs->caller->global.inlined_to
3467 : dst_rdesc->cs->caller;
3468 if (dst->caller->global.inlined_to == top)
3469 break;
3471 gcc_assert (dst_rdesc);
3472 dst_jf->value.constant.rdesc = dst_rdesc;
3475 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3476 && src->caller == dst->caller)
3478 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3479 ? dst->caller->global.inlined_to : dst->caller;
3480 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3481 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3483 int c = ipa_get_controlled_uses (root_info, idx);
3484 if (c != IPA_UNDESCRIBED_USE)
3486 c++;
3487 ipa_set_controlled_uses (root_info, idx, c);
3493 /* Hook that is called by cgraph.c when a node is duplicated. */
3495 static void
3496 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3497 ATTRIBUTE_UNUSED void *data)
3499 struct ipa_node_params *old_info, *new_info;
3500 struct ipa_agg_replacement_value *old_av, *new_av;
3502 ipa_check_create_node_params ();
3503 old_info = IPA_NODE_REF (src);
3504 new_info = IPA_NODE_REF (dst);
3506 new_info->descriptors = old_info->descriptors.copy ();
3507 new_info->lattices = NULL;
3508 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3510 new_info->analysis_done = old_info->analysis_done;
3511 new_info->node_enqueued = old_info->node_enqueued;
3513 old_av = ipa_get_agg_replacements_for_node (src);
3514 if (!old_av)
3515 return;
3517 new_av = NULL;
3518 while (old_av)
3520 struct ipa_agg_replacement_value *v;
3522 v = ggc_alloc<ipa_agg_replacement_value> ();
3523 memcpy (v, old_av, sizeof (*v));
3524 v->next = new_av;
3525 new_av = v;
3526 old_av = old_av->next;
3528 ipa_set_node_agg_value_chain (dst, new_av);
3532 /* Analyze a function newly added to the callgraph. */
3534 static void
3535 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3537 if (node->has_gimple_body_p ())
3538 ipa_analyze_node (node);
3541 /* Register our cgraph hooks if they are not already there. */
3543 void
3544 ipa_register_cgraph_hooks (void)
3546 if (!edge_removal_hook_holder)
3547 edge_removal_hook_holder =
3548 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3549 if (!node_removal_hook_holder)
3550 node_removal_hook_holder =
3551 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3552 if (!edge_duplication_hook_holder)
3553 edge_duplication_hook_holder =
3554 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3555 if (!node_duplication_hook_holder)
3556 node_duplication_hook_holder =
3557 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3558 function_insertion_hook_holder =
3559 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3562 /* Unregister our cgraph hooks. */
3564 static void
3565 ipa_unregister_cgraph_hooks (void)
3567 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3568 edge_removal_hook_holder = NULL;
3569 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3570 node_removal_hook_holder = NULL;
3571 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3572 edge_duplication_hook_holder = NULL;
3573 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3574 node_duplication_hook_holder = NULL;
3575 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3576 function_insertion_hook_holder = NULL;
3579 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3580 longer needed after ipa-cp. */
3582 void
3583 ipa_free_all_structures_after_ipa_cp (void)
3585 if (!optimize)
3587 ipa_free_all_edge_args ();
3588 ipa_free_all_node_params ();
3589 free_alloc_pool (ipcp_sources_pool);
3590 free_alloc_pool (ipcp_cst_values_pool);
3591 free_alloc_pool (ipcp_poly_ctx_values_pool);
3592 free_alloc_pool (ipcp_agg_lattice_pool);
3593 ipa_unregister_cgraph_hooks ();
3594 if (ipa_refdesc_pool)
3595 free_alloc_pool (ipa_refdesc_pool);
3599 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3600 longer needed after indirect inlining. */
3602 void
3603 ipa_free_all_structures_after_iinln (void)
3605 ipa_free_all_edge_args ();
3606 ipa_free_all_node_params ();
3607 ipa_unregister_cgraph_hooks ();
3608 if (ipcp_sources_pool)
3609 free_alloc_pool (ipcp_sources_pool);
3610 if (ipcp_cst_values_pool)
3611 free_alloc_pool (ipcp_cst_values_pool);
3612 if (ipcp_poly_ctx_values_pool)
3613 free_alloc_pool (ipcp_poly_ctx_values_pool);
3614 if (ipcp_agg_lattice_pool)
3615 free_alloc_pool (ipcp_agg_lattice_pool);
3616 if (ipa_refdesc_pool)
3617 free_alloc_pool (ipa_refdesc_pool);
3620 /* Print ipa_tree_map data structures of function NODE to F. */
3623 void
3624 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3626 int i, count;
3627 struct ipa_node_params *info;
3629 if (!node->definition)
3630 return;
3631 info = IPA_NODE_REF (node);
3632 fprintf (f, " function %s/%i parameter descriptors:\n",
3633 node->name (), node->order);
3634 count = ipa_get_param_count (info);
3635 for (i = 0; i < count; i++)
3637 int c;
3639 fprintf (f, " ");
3640 ipa_dump_param (f, info, i);
3641 if (ipa_is_param_used (info, i))
3642 fprintf (f, " used");
3643 c = ipa_get_controlled_uses (info, i);
3644 if (c == IPA_UNDESCRIBED_USE)
3645 fprintf (f, " undescribed_use");
3646 else
3647 fprintf (f, " controlled_uses=%i", c);
3648 fprintf (f, "\n");
3652 /* Print ipa_tree_map data structures of all functions in the
3653 callgraph to F. */
3655 void
3656 ipa_print_all_params (FILE * f)
3658 struct cgraph_node *node;
3660 fprintf (f, "\nFunction parameters:\n");
3661 FOR_EACH_FUNCTION (node)
3662 ipa_print_node_params (f, node);
3665 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3667 vec<tree>
3668 ipa_get_vector_of_formal_parms (tree fndecl)
3670 vec<tree> args;
3671 int count;
3672 tree parm;
3674 gcc_assert (!flag_wpa);
3675 count = count_formal_params (fndecl);
3676 args.create (count);
3677 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3678 args.quick_push (parm);
3680 return args;
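/* Callers are expected to release the returned vector; a minimal usage
   sketch (with a hypothetical examine_parm helper):

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     for (unsigned j = 0; j < parms.length (); j++)
       examine_parm (parms[j]);
     parms.release ();
*/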
3683 /* Return a heap allocated vector containing types of formal parameters of
3684 function type FNTYPE. */
3686 vec<tree>
3687 ipa_get_vector_of_formal_parm_types (tree fntype)
3689 vec<tree> types;
3690 int count = 0;
3691 tree t;
3693 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3694 count++;
3696 types.create (count);
3697 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3698 types.quick_push (TREE_VALUE (t));
3700 return types;
3703 /* Modify the function declaration FNDECL and its type according to the plan in
3704 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3705 to reflect the actual parameters being modified which are determined by the
3706 base_index field. */
3708 void
3709 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3711 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3712 tree orig_type = TREE_TYPE (fndecl);
3713 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3715 /* The following test is an ugly hack: some functions simply don't have any
3716 arguments in their type. This is probably a bug but well... */
3717 bool care_for_types = (old_arg_types != NULL_TREE);
3718 bool last_parm_void;
3719 vec<tree> otypes;
3720 if (care_for_types)
3722 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3723 == void_type_node);
3724 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3725 if (last_parm_void)
3726 gcc_assert (oparms.length () + 1 == otypes.length ());
3727 else
3728 gcc_assert (oparms.length () == otypes.length ());
3730 else
3732 last_parm_void = false;
3733 otypes.create (0);
3736 int len = adjustments.length ();
3737 tree *link = &DECL_ARGUMENTS (fndecl);
3738 tree new_arg_types = NULL;
3739 for (int i = 0; i < len; i++)
3741 struct ipa_parm_adjustment *adj;
3742 gcc_assert (link);
3744 adj = &adjustments[i];
3745 tree parm;
3746 if (adj->op == IPA_PARM_OP_NEW)
3747 parm = NULL;
3748 else
3749 parm = oparms[adj->base_index];
3750 adj->base = parm;
3752 if (adj->op == IPA_PARM_OP_COPY)
3754 if (care_for_types)
3755 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3756 new_arg_types);
3757 *link = parm;
3758 link = &DECL_CHAIN (parm);
3760 else if (adj->op != IPA_PARM_OP_REMOVE)
3762 tree new_parm;
3763 tree ptype;
3765 if (adj->by_ref)
3766 ptype = build_pointer_type (adj->type);
3767 else
3769 ptype = adj->type;
3770 if (is_gimple_reg_type (ptype))
3772 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3773 if (TYPE_ALIGN (ptype) < malign)
3774 ptype = build_aligned_type (ptype, malign);
3778 if (care_for_types)
3779 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3781 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3782 ptype);
3783 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3784 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3785 DECL_ARTIFICIAL (new_parm) = 1;
3786 DECL_ARG_TYPE (new_parm) = ptype;
3787 DECL_CONTEXT (new_parm) = fndecl;
3788 TREE_USED (new_parm) = 1;
3789 DECL_IGNORED_P (new_parm) = 1;
3790 layout_decl (new_parm, 0);
3792 if (adj->op == IPA_PARM_OP_NEW)
3793 adj->base = NULL;
3794 else
3795 adj->base = parm;
3796 adj->new_decl = new_parm;
3798 *link = new_parm;
3799 link = &DECL_CHAIN (new_parm);
3803 *link = NULL_TREE;
3805 tree new_reversed = NULL;
3806 if (care_for_types)
3808 new_reversed = nreverse (new_arg_types);
3809 if (last_parm_void)
3811 if (new_reversed)
3812 TREE_CHAIN (new_arg_types) = void_list_node;
3813 else
3814 new_reversed = void_list_node;
3818 /* Use copy_node to preserve as much as possible from the original type
3819 (debug info, attribute lists etc.).
3820 The exception is that METHOD_TYPEs must have a THIS argument; when we
3821 are asked to remove it, we need to build a new FUNCTION_TYPE
3822 instead. */
3823 tree new_type = NULL;
3824 if (TREE_CODE (orig_type) != METHOD_TYPE
3825 || (adjustments[0].op == IPA_PARM_OP_COPY
3826 && adjustments[0].base_index == 0))
3828 new_type = build_distinct_type_copy (orig_type);
3829 TYPE_ARG_TYPES (new_type) = new_reversed;
3831 else
3833 new_type
3834 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3835 new_reversed));
3836 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3837 DECL_VINDEX (fndecl) = NULL_TREE;
3840 /* When the signature changes, we need to clear the builtin info. */
3841 if (DECL_BUILT_IN (fndecl))
3843 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3844 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3847 TREE_TYPE (fndecl) = new_type;
3848 DECL_VIRTUAL_P (fndecl) = 0;
3849 DECL_LANG_SPECIFIC (fndecl) = NULL;
3850 otypes.release ();
3851 oparms.release ();
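/* A hedged sketch of how a caller might drop the second of two formal
   parameters (illustrative only; real users such as the IPA-SRA pass
   fill in more fields of the adjustment entries):

     ipa_parm_adjustment_vec adjustments;
     adjustments.create (2);
     struct ipa_parm_adjustment adj;
     memset (&adj, 0, sizeof (adj));
     adj.op = IPA_PARM_OP_COPY;    /* Keep the first parameter as is.  */
     adj.base_index = 0;
     adjustments.quick_push (adj);
     adj.op = IPA_PARM_OP_REMOVE;  /* Drop the second one.  */
     adj.base_index = 1;
     adjustments.quick_push (adj);
     ipa_modify_formal_parameters (fndecl, adjustments);
     adjustments.release ();  */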
3854 /* Modify the actual arguments of the function call STMT as indicated in
3855 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3856 Otherwise it must contain the corresponding call graph edge. */
3858 void
3859 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3860 ipa_parm_adjustment_vec adjustments)
3862 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3863 vec<tree> vargs;
3864 vec<tree, va_gc> **debug_args = NULL;
3865 gcall *new_stmt;
3866 gimple_stmt_iterator gsi, prev_gsi;
3867 tree callee_decl;
3868 int i, len;
3870 len = adjustments.length ();
3871 vargs.create (len);
3872 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3873 current_node->remove_stmt_references (stmt);
3875 gsi = gsi_for_stmt (stmt);
3876 prev_gsi = gsi;
3877 gsi_prev (&prev_gsi);
3878 for (i = 0; i < len; i++)
3880 struct ipa_parm_adjustment *adj;
3882 adj = &adjustments[i];
3884 if (adj->op == IPA_PARM_OP_COPY)
3886 tree arg = gimple_call_arg (stmt, adj->base_index);
3888 vargs.quick_push (arg);
3890 else if (adj->op != IPA_PARM_OP_REMOVE)
3892 tree expr, base, off;
3893 location_t loc;
3894 unsigned int deref_align = 0;
3895 bool deref_base = false;
3897 /* Because we create the new parameter out of the value of the old one, we
3898 can do the following kinds of transformations:
3900 - A scalar passed by reference is converted to a scalar passed by
3901 value. (adj->by_ref is false and the type of the original
3902 actual argument is a pointer to a scalar).
3904 - A part of an aggregate is passed instead of the whole aggregate.
3905 The part can be passed either by value or by reference; this is
3906 determined by the value of adj->by_ref. Moreover, the code below
3907 handles both the situation when the original aggregate is passed by
3908 value (its type is not a pointer) and the one when it is passed by
3909 reference (it is a pointer to an aggregate).
3911 When the new argument is passed by reference (adj->by_ref is true)
3912 it must be a part of an aggregate and therefore we form it by
3913 simply taking the address of a reference inside the original
3914 aggregate. */
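/* For instance (an illustrative example): for a call foo (s) where s is
   a struct passed by value and the new parameter is a scalar member at
   byte offset 4, we build MEM_REF [&s, 4] and, because it has register
   type, load it into a temporary below; with adj->by_ref set we would
   instead pass &MEM_REF [&s, 4].  */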
3916 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3917 base = gimple_call_arg (stmt, adj->base_index);
3918 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3919 : EXPR_LOCATION (base);
3921 if (TREE_CODE (base) != ADDR_EXPR
3922 && POINTER_TYPE_P (TREE_TYPE (base)))
3923 off = build_int_cst (adj->alias_ptr_type,
3924 adj->offset / BITS_PER_UNIT);
3925 else
3927 HOST_WIDE_INT base_offset;
3928 tree prev_base;
3929 bool addrof;
3931 if (TREE_CODE (base) == ADDR_EXPR)
3933 base = TREE_OPERAND (base, 0);
3934 addrof = true;
3936 else
3937 addrof = false;
3938 prev_base = base;
3939 base = get_addr_base_and_unit_offset (base, &base_offset);
3940 /* Aggregate arguments can have non-invariant addresses. */
3941 if (!base)
3943 base = build_fold_addr_expr (prev_base);
3944 off = build_int_cst (adj->alias_ptr_type,
3945 adj->offset / BITS_PER_UNIT);
3947 else if (TREE_CODE (base) == MEM_REF)
3949 if (!addrof)
3951 deref_base = true;
3952 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3954 off = build_int_cst (adj->alias_ptr_type,
3955 base_offset
3956 + adj->offset / BITS_PER_UNIT);
3957 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3958 off);
3959 base = TREE_OPERAND (base, 0);
3961 else
3963 off = build_int_cst (adj->alias_ptr_type,
3964 base_offset
3965 + adj->offset / BITS_PER_UNIT);
3966 base = build_fold_addr_expr (base);
3970 if (!adj->by_ref)
3972 tree type = adj->type;
3973 unsigned int align;
3974 unsigned HOST_WIDE_INT misalign;
3976 if (deref_base)
3978 align = deref_align;
3979 misalign = 0;
3981 else
3983 get_pointer_alignment_1 (base, &align, &misalign);
3984 if (TYPE_ALIGN (type) > align)
3985 align = TYPE_ALIGN (type);
3987 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3988 * BITS_PER_UNIT);
3989 misalign = misalign & (align - 1);
3990 if (misalign != 0)
3991 align = (misalign & -misalign);
3992 if (align < TYPE_ALIGN (type))
3993 type = build_aligned_type (type, align);
3994 base = force_gimple_operand_gsi (&gsi, base,
3995 true, NULL, true, GSI_SAME_STMT);
3996 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3997 /* If expr is not a valid gimple call argument, emit
3998 a load into a temporary. */
3999 if (is_gimple_reg_type (TREE_TYPE (expr)))
4001 gimple tem = gimple_build_assign (NULL_TREE, expr);
4002 if (gimple_in_ssa_p (cfun))
4004 gimple_set_vuse (tem, gimple_vuse (stmt));
4005 expr = make_ssa_name (TREE_TYPE (expr), tem);
4007 else
4008 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4009 gimple_assign_set_lhs (tem, expr);
4010 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4013 else
4015 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4016 expr = build_fold_addr_expr (expr);
4017 expr = force_gimple_operand_gsi (&gsi, expr,
4018 true, NULL, true, GSI_SAME_STMT);
4020 vargs.quick_push (expr);
4022 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4024 unsigned int ix;
4025 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4026 gimple def_temp;
4028 arg = gimple_call_arg (stmt, adj->base_index);
4029 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4031 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4032 continue;
4033 arg = fold_convert_loc (gimple_location (stmt),
4034 TREE_TYPE (origin), arg);
4036 if (debug_args == NULL)
4037 debug_args = decl_debug_args_insert (callee_decl);
4038 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4039 if (ddecl == origin)
4041 ddecl = (**debug_args)[ix + 1];
4042 break;
4044 if (ddecl == NULL)
4046 ddecl = make_node (DEBUG_EXPR_DECL);
4047 DECL_ARTIFICIAL (ddecl) = 1;
4048 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4049 DECL_MODE (ddecl) = DECL_MODE (origin);
4051 vec_safe_push (*debug_args, origin);
4052 vec_safe_push (*debug_args, ddecl);
4054 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4055 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4059 if (dump_file && (dump_flags & TDF_DETAILS))
4061 fprintf (dump_file, "replacing stmt:");
4062 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4065 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4066 vargs.release ();
4067 if (gimple_call_lhs (stmt))
4068 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4070 gimple_set_block (new_stmt, gimple_block (stmt));
4071 if (gimple_has_location (stmt))
4072 gimple_set_location (new_stmt, gimple_location (stmt));
4073 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4074 gimple_call_copy_flags (new_stmt, stmt);
4075 if (gimple_in_ssa_p (cfun))
4077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4078 if (gimple_vdef (stmt))
4080 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4081 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4085 if (dump_file && (dump_flags & TDF_DETAILS))
4087 fprintf (dump_file, "with stmt:");
4088 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4089 fprintf (dump_file, "\n");
4091 gsi_replace (&gsi, new_stmt, true);
4092 if (cs)
4093 cs->set_call_stmt (new_stmt);
4094 do
4096 current_node->record_stmt_references (gsi_stmt (gsi));
4097 gsi_prev (&gsi);
4099 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
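/* Schematically (an illustrative example): with an adjustment vector
   describing "copy argument 0, remove argument 1", a call

     bar (x_1, y_2);

   is rebuilt above as

     bar (x_1);

   with a debug bind for y_2 emitted when debug info is wanted, and the
   new statement is re-associated with the call graph edge CS.  */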
4102 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4103 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4104 specifies whether the function should care about type incompatibility
4105 between the current and new expressions. If it is false, the function will
4106 leave incompatibility issues to the caller. Return true iff the expression
4107 was modified. */
4109 bool
4110 ipa_modify_expr (tree *expr, bool convert,
4111 ipa_parm_adjustment_vec adjustments)
4113 struct ipa_parm_adjustment *cand
4114 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4115 if (!cand)
4116 return false;
4118 tree src;
4119 if (cand->by_ref)
4120 src = build_simple_mem_ref (cand->new_decl);
4121 else
4122 src = cand->new_decl;
4124 if (dump_file && (dump_flags & TDF_DETAILS))
4126 fprintf (dump_file, "About to replace expr ");
4127 print_generic_expr (dump_file, *expr, 0);
4128 fprintf (dump_file, " with ");
4129 print_generic_expr (dump_file, src, 0);
4130 fprintf (dump_file, "\n");
4133 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4135 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4136 *expr = vce;
4138 else
4139 *expr = src;
4140 return true;
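/* For example (illustrative): after a component P.x of parameter P has
   been turned into a new scalar parameter by ipa_modify_formal_parameters
   (named with the "SYNTH" prefix when no arg_prefix is given), a use of
   P.x in the body is rewritten to that new decl, or, when CONVERT is set
   and the types disagree, to a VIEW_CONVERT_EXPR of it.  */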
4143 /* If T is an SSA_NAME, return NULL if it is not a default def or
4144 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4145 the base variable is always returned, regardless of whether it is a
4146 default def. Return T if it is not an SSA_NAME. */
4148 static tree
4149 get_ssa_base_param (tree t, bool ignore_default_def)
4151 if (TREE_CODE (t) == SSA_NAME)
4153 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4154 return SSA_NAME_VAR (t);
4155 else
4156 return NULL_TREE;
4158 return t;
4161 /* Given an expression, return an adjustment entry specifying the
4162 transformation to be done on EXPR. If no suitable adjustment entry
4163 was found, returns NULL.
4165 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4166 default def; otherwise bail on them.
4168 If CONVERT is non-NULL, this function will set *CONVERT if the
4169 expression provided is a component reference. ADJUSTMENTS is the
4170 adjustments vector. */
4172 ipa_parm_adjustment *
4173 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4174 ipa_parm_adjustment_vec adjustments,
4175 bool ignore_default_def)
4177 if (TREE_CODE (**expr) == BIT_FIELD_REF
4178 || TREE_CODE (**expr) == IMAGPART_EXPR
4179 || TREE_CODE (**expr) == REALPART_EXPR)
4181 *expr = &TREE_OPERAND (**expr, 0);
4182 if (convert)
4183 *convert = true;
4186 HOST_WIDE_INT offset, size, max_size;
4187 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4188 if (!base || size == -1 || max_size == -1)
4189 return NULL;
4191 if (TREE_CODE (base) == MEM_REF)
4193 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4194 base = TREE_OPERAND (base, 0);
4197 base = get_ssa_base_param (base, ignore_default_def);
4198 if (!base || TREE_CODE (base) != PARM_DECL)
4199 return NULL;
4201 struct ipa_parm_adjustment *cand = NULL;
4202 unsigned int len = adjustments.length ();
4203 for (unsigned i = 0; i < len; i++)
4205 struct ipa_parm_adjustment *adj = &adjustments[i];
4207 if (adj->base == base
4208 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4210 cand = adj;
4211 break;
4215 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4216 return NULL;
4217 return cand;
4220 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4222 static bool
4223 index_in_adjustments_multiple_times_p (int base_index,
4224 ipa_parm_adjustment_vec adjustments)
4226 int i, len = adjustments.length ();
4227 bool one = false;
4229 for (i = 0; i < len; i++)
4231 struct ipa_parm_adjustment *adj;
4232 adj = &adjustments[i];
4234 if (adj->base_index == base_index)
4236 if (one)
4237 return true;
4238 else
4239 one = true;
4242 return false;
4246 /* Return adjustments that should have the same effect on function parameters
4247 and call arguments as if they were first changed according to adjustments in
4248 INNER and then by adjustments in OUTER. */
4250 ipa_parm_adjustment_vec
4251 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4252 ipa_parm_adjustment_vec outer)
4254 int i, outlen = outer.length ();
4255 int inlen = inner.length ();
4256 int removals = 0;
4257 ipa_parm_adjustment_vec adjustments, tmp;
4259 tmp.create (inlen);
4260 for (i = 0; i < inlen; i++)
4262 struct ipa_parm_adjustment *n;
4263 n = &inner[i];
4265 if (n->op == IPA_PARM_OP_REMOVE)
4266 removals++;
4267 else
4269 /* FIXME: Handling of new arguments is not implemented yet. */
4270 gcc_assert (n->op != IPA_PARM_OP_NEW);
4271 tmp.quick_push (*n);
4275 adjustments.create (outlen + removals);
4276 for (i = 0; i < outlen; i++)
4278 struct ipa_parm_adjustment r;
4279 struct ipa_parm_adjustment *out = &outer[i];
4280 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4282 memset (&r, 0, sizeof (r));
4283 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4284 if (out->op == IPA_PARM_OP_REMOVE)
4286 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4288 r.op = IPA_PARM_OP_REMOVE;
4289 adjustments.quick_push (r);
4291 continue;
4293 else
4295 /* FIXME: Handling of new arguments is not implemented yet. */
4296 gcc_assert (out->op != IPA_PARM_OP_NEW);
4299 r.base_index = in->base_index;
4300 r.type = out->type;
4302 /* FIXME: Create nonlocal value too. */
4304 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4305 r.op = IPA_PARM_OP_COPY;
4306 else if (in->op == IPA_PARM_OP_COPY)
4307 r.offset = out->offset;
4308 else if (out->op == IPA_PARM_OP_COPY)
4309 r.offset = in->offset;
4310 else
4311 r.offset = in->offset + out->offset;
4312 adjustments.quick_push (r);
4315 for (i = 0; i < inlen; i++)
4317 struct ipa_parm_adjustment *n = &inner[i];
4319 if (n->op == IPA_PARM_OP_REMOVE)
4320 adjustments.quick_push (*n);
4323 tmp.release ();
4324 return adjustments;
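/* A worked example (illustrative): let INNER turn (a, b, c) into (a, c)
   by removing b, and let OUTER then remove its second parameter, i.e.
   c.  The combined vector contains a copy of a, a removal corresponding
   to c, and the removal of b carried over at the end, which has the
   same effect as applying INNER first and OUTER afterwards.  */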
4327 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4328 human-friendly way, assuming they are meant to be applied to FNDECL. */
4330 void
4331 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4332 tree fndecl)
4334 int i, len = adjustments.length ();
4335 bool first = true;
4336 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4338 fprintf (file, "IPA param adjustments: ");
4339 for (i = 0; i < len; i++)
4341 struct ipa_parm_adjustment *adj;
4342 adj = &adjustments[i];
4344 if (!first)
4345 fprintf (file, " ");
4346 else
4347 first = false;
4349 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4350 print_generic_expr (file, parms[adj->base_index], 0);
4351 if (adj->base)
4353 fprintf (file, ", base: ");
4354 print_generic_expr (file, adj->base, 0);
4356 if (adj->new_decl)
4358 fprintf (file, ", new_decl: ");
4359 print_generic_expr (file, adj->new_decl, 0);
4361 if (adj->new_ssa_base)
4363 fprintf (file, ", new_ssa_base: ");
4364 print_generic_expr (file, adj->new_ssa_base, 0);
4367 if (adj->op == IPA_PARM_OP_COPY)
4368 fprintf (file, ", copy_param");
4369 else if (adj->op == IPA_PARM_OP_REMOVE)
4370 fprintf (file, ", remove_param");
4371 else
4372 fprintf (file, ", offset %li", (long) adj->offset);
4373 if (adj->by_ref)
4374 fprintf (file, ", by_ref");
4375 print_node_brief (file, ", type: ", adj->type, 0);
4376 fprintf (file, "\n");
4378 parms.release ();
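/* The output looks along these lines (schematic, reconstructed from the
   format strings above; tree details elided):

     IPA param adjustments: 0. base_index: 0 - a, copy_param, type: ...
      1. base_index: 1 - b, offset 32, by_ref, type: ...  */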
4381 /* Dump the linked list of aggregate replacement values AV to F. */
4383 void
4384 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4386 bool comma = false;
4387 fprintf (f, " Aggregate replacements:");
4388 for (; av; av = av->next)
4390 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4391 av->index, av->offset);
4392 print_generic_expr (f, av->value, 0);
4393 comma = true;
4395 fprintf (f, "\n");
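/* E.g. " Aggregate replacements: 1[0]=8, 1[32]=16" (illustrative) says
   that the aggregate passed in the second argument holds constant 8 at
   bit offset 0 and constant 16 at bit offset 32.  */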
4398 /* Stream out jump function JUMP_FUNC to OB. */
4400 static void
4401 ipa_write_jump_function (struct output_block *ob,
4402 struct ipa_jump_func *jump_func)
4404 struct ipa_agg_jf_item *item;
4405 struct bitpack_d bp;
4406 int i, count;
4408 streamer_write_uhwi (ob, jump_func->type);
4409 switch (jump_func->type)
4411 case IPA_JF_UNKNOWN:
4412 break;
4413 case IPA_JF_CONST:
4414 gcc_assert (
4415 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4416 stream_write_tree (ob, jump_func->value.constant.value, true);
4417 break;
4418 case IPA_JF_PASS_THROUGH:
4419 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4420 if (jump_func->value.pass_through.operation == NOP_EXPR)
4422 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4423 bp = bitpack_create (ob->main_stream);
4424 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4425 streamer_write_bitpack (&bp);
4427 else
4429 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4430 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4432 break;
4433 case IPA_JF_ANCESTOR:
4434 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4435 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4436 bp = bitpack_create (ob->main_stream);
4437 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4438 streamer_write_bitpack (&bp);
4439 break;
4442 count = vec_safe_length (jump_func->agg.items);
4443 streamer_write_uhwi (ob, count);
4444 if (count)
4446 bp = bitpack_create (ob->main_stream);
4447 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4448 streamer_write_bitpack (&bp);
4451 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4453 streamer_write_uhwi (ob, item->offset);
4454 stream_write_tree (ob, item->value, true);
4458 /* Read in jump function JUMP_FUNC from IB. */
4460 static void
4461 ipa_read_jump_function (struct lto_input_block *ib,
4462 struct ipa_jump_func *jump_func,
4463 struct cgraph_edge *cs,
4464 struct data_in *data_in)
4466 enum jump_func_type jftype;
4467 enum tree_code operation;
4468 int i, count;
4470 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4471 switch (jftype)
4473 case IPA_JF_UNKNOWN:
4474 jump_func->type = IPA_JF_UNKNOWN;
4475 break;
4476 case IPA_JF_CONST:
4477 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4478 break;
4479 case IPA_JF_PASS_THROUGH:
4480 operation = (enum tree_code) streamer_read_uhwi (ib);
4481 if (operation == NOP_EXPR)
4483 int formal_id = streamer_read_uhwi (ib);
4484 struct bitpack_d bp = streamer_read_bitpack (ib);
4485 bool agg_preserved = bp_unpack_value (&bp, 1);
4486 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4488 else
4490 tree operand = stream_read_tree (ib, data_in);
4491 int formal_id = streamer_read_uhwi (ib);
4492 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4493 operation);
4495 break;
4496 case IPA_JF_ANCESTOR:
4498 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4499 int formal_id = streamer_read_uhwi (ib);
4500 struct bitpack_d bp = streamer_read_bitpack (ib);
4501 bool agg_preserved = bp_unpack_value (&bp, 1);
4502 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4503 break;
4507 count = streamer_read_uhwi (ib);
4508 vec_alloc (jump_func->agg.items, count);
4509 if (count)
4511 struct bitpack_d bp = streamer_read_bitpack (ib);
4512 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4514 for (i = 0; i < count; i++)
4516 struct ipa_agg_jf_item item;
4517 item.offset = streamer_read_uhwi (ib);
4518 item.value = stream_read_tree (ib, data_in);
4519 jump_func->agg.items->quick_push (item);
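/* For reference, the on-stream layout that ipa_write_jump_function and
   ipa_read_jump_function above agree on is, schematically:

     uhwi     jump function type
     ...      type-specific payload (trees, formal_ids, bitpacks)
     uhwi     number of aggregate items
     bitpack  by_ref flag (present only when the count is nonzero)
     per item: uhwi offset, tree value  */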
4523 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to
4524 CS that are relevant to indirect inlining. */
4526 static void
4527 ipa_write_indirect_edge_info (struct output_block *ob,
4528 struct cgraph_edge *cs)
4530 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4531 struct bitpack_d bp;
4533 streamer_write_hwi (ob, ii->param_index);
4534 bp = bitpack_create (ob->main_stream);
4535 bp_pack_value (&bp, ii->polymorphic, 1);
4536 bp_pack_value (&bp, ii->agg_contents, 1);
4537 bp_pack_value (&bp, ii->member_ptr, 1);
4538 bp_pack_value (&bp, ii->by_ref, 1);
4539 bp_pack_value (&bp, ii->vptr_changed, 1);
4540 streamer_write_bitpack (&bp);
4541 if (ii->agg_contents || ii->polymorphic)
4542 streamer_write_hwi (ob, ii->offset);
4543 else
4544 gcc_assert (ii->offset == 0);
4546 if (ii->polymorphic)
4548 streamer_write_hwi (ob, ii->otr_token);
4549 stream_write_tree (ob, ii->otr_type, true);
4550 ii->context.stream_out (ob);
4554 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to
4555 CS that are relevant to indirect inlining. */
4557 static void
4558 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4559 struct data_in *data_in,
4560 struct cgraph_edge *cs)
4562 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4563 struct bitpack_d bp;
4565 ii->param_index = (int) streamer_read_hwi (ib);
4566 bp = streamer_read_bitpack (ib);
4567 ii->polymorphic = bp_unpack_value (&bp, 1);
4568 ii->agg_contents = bp_unpack_value (&bp, 1);
4569 ii->member_ptr = bp_unpack_value (&bp, 1);
4570 ii->by_ref = bp_unpack_value (&bp, 1);
4571 ii->vptr_changed = bp_unpack_value (&bp, 1);
4572 if (ii->agg_contents || ii->polymorphic)
4573 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4574 else
4575 ii->offset = 0;
4576 if (ii->polymorphic)
4578 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4579 ii->otr_type = stream_read_tree (ib, data_in);
4580 ii->context.stream_in (ib, data_in);
4584 /* Stream out NODE info to OB. */
4586 static void
4587 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4589 int node_ref;
4590 lto_symtab_encoder_t encoder;
4591 struct ipa_node_params *info = IPA_NODE_REF (node);
4592 int j;
4593 struct cgraph_edge *e;
4594 struct bitpack_d bp;
4596 encoder = ob->decl_state->symtab_node_encoder;
4597 node_ref = lto_symtab_encoder_encode (encoder, node);
4598 streamer_write_uhwi (ob, node_ref);
4600 streamer_write_uhwi (ob, ipa_get_param_count (info));
4601 for (j = 0; j < ipa_get_param_count (info); j++)
4602 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4603 bp = bitpack_create (ob->main_stream);
4604 gcc_assert (info->analysis_done
4605 || ipa_get_param_count (info) == 0);
4606 gcc_assert (!info->node_enqueued);
4607 gcc_assert (!info->ipcp_orig_node);
4608 for (j = 0; j < ipa_get_param_count (info); j++)
4609 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4610 streamer_write_bitpack (&bp);
4611 for (j = 0; j < ipa_get_param_count (info); j++)
4612 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4613 for (e = node->callees; e; e = e->next_callee)
4615 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4617 streamer_write_uhwi (ob,
4618 ipa_get_cs_argument_count (args) * 2
4619 + (args->polymorphic_call_contexts != NULL));
4620 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4622 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4623 if (args->polymorphic_call_contexts != NULL)
4624 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4627 for (e = node->indirect_calls; e; e = e->next_callee)
4629 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4631 streamer_write_uhwi (ob,
4632 ipa_get_cs_argument_count (args) * 2
4633 + (args->polymorphic_call_contexts != NULL));
4634 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4636 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4637 if (args->polymorphic_call_contexts != NULL)
4638 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4640 ipa_write_indirect_edge_info (ob, e);
4644 /* Stream in NODE info from IB. */
4646 static void
4647 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4648 struct data_in *data_in)
4650 struct ipa_node_params *info = IPA_NODE_REF (node);
4651 int k;
4652 struct cgraph_edge *e;
4653 struct bitpack_d bp;
4655 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4657 for (k = 0; k < ipa_get_param_count (info); k++)
4658 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4660 bp = streamer_read_bitpack (ib);
4661 if (ipa_get_param_count (info) != 0)
4662 info->analysis_done = true;
4663 info->node_enqueued = false;
4664 for (k = 0; k < ipa_get_param_count (info); k++)
4665 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4666 for (k = 0; k < ipa_get_param_count (info); k++)
4667 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4668 for (e = node->callees; e; e = e->next_callee)
4670 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4671 int count = streamer_read_uhwi (ib);
4672 bool contexts_computed = count & 1;
4673 count /= 2;
4675 if (!count)
4676 continue;
4677 vec_safe_grow_cleared (args->jump_functions, count);
4678 if (contexts_computed)
4679 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4681 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4683 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4684 data_in);
4685 if (contexts_computed)
4686 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4689 for (e = node->indirect_calls; e; e = e->next_callee)
4691 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4692 int count = streamer_read_uhwi (ib);
4693 bool contexts_computed = count & 1;
4694 count /= 2;
4696 if (count)
4698 vec_safe_grow_cleared (args->jump_functions, count);
4699 if (contexts_computed)
4700 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4701 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4703 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4704 data_in);
4705 if (contexts_computed)
4706 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4709 ipa_read_indirect_edge_info (ib, data_in, e);
4713 /* Write jump functions for the nodes in the current LTO partition. */
4715 void
4716 ipa_prop_write_jump_functions (void)
4718 struct cgraph_node *node;
4719 struct output_block *ob;
4720 unsigned int count = 0;
4721 lto_symtab_encoder_iterator lsei;
4722 lto_symtab_encoder_t encoder;
4725 if (!ipa_node_params_vector.exists ())
4726 return;
4728 ob = create_output_block (LTO_section_jump_functions);
4729 encoder = ob->decl_state->symtab_node_encoder;
4730 ob->symbol = NULL;
4731 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4732 lsei_next_function_in_partition (&lsei))
4734 node = lsei_cgraph_node (lsei);
4735 if (node->has_gimple_body_p ()
4736 && IPA_NODE_REF (node) != NULL)
4737 count++;
4740 streamer_write_uhwi (ob, count);
4742 /* Process all of the functions. */
4743 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4744 lsei_next_function_in_partition (&lsei))
4746 node = lsei_cgraph_node (lsei);
4747 if (node->has_gimple_body_p ()
4748 && IPA_NODE_REF (node) != NULL)
4749 ipa_write_node_info (ob, node);
4751 streamer_write_char_stream (ob->main_stream, 0);
4752 produce_asm (ob, NULL);
4753 destroy_output_block (ob);
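/* The jump functions section thus consists of a uhwi node count, the
   per-node records produced by ipa_write_node_info and a terminating
   zero byte.  */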
4756 /* Read a jump functions section in file FILE_DATA of length LEN with data DATA. */
4758 static void
4759 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4760 size_t len)
4762 const struct lto_function_header *header =
4763 (const struct lto_function_header *) data;
4764 const int cfg_offset = sizeof (struct lto_function_header);
4765 const int main_offset = cfg_offset + header->cfg_size;
4766 const int string_offset = main_offset + header->main_size;
4767 struct data_in *data_in;
4768 unsigned int i;
4769 unsigned int count;
4771 lto_input_block ib_main ((const char *) data + main_offset,
4772 header->main_size);
4774 data_in =
4775 lto_data_in_create (file_data, (const char *) data + string_offset,
4776 header->string_size, vNULL);
4777 count = streamer_read_uhwi (&ib_main);
4779 for (i = 0; i < count; i++)
4781 unsigned int index;
4782 struct cgraph_node *node;
4783 lto_symtab_encoder_t encoder;
4785 index = streamer_read_uhwi (&ib_main);
4786 encoder = file_data->symtab_node_encoder;
4787 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4788 index));
4789 gcc_assert (node->definition);
4790 ipa_read_node_info (&ib_main, node, data_in);
4792 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4793 len);
4794 lto_data_in_delete (data_in);
4797 /* Read ipcp jump functions. */
4799 void
4800 ipa_prop_read_jump_functions (void)
4802 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4803 struct lto_file_decl_data *file_data;
4804 unsigned int j = 0;
4806 ipa_check_create_node_params ();
4807 ipa_check_create_edge_args ();
4808 ipa_register_cgraph_hooks ();
4810 while ((file_data = file_data_vec[j++]))
4812 size_t len;
4813 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4815 if (data)
4816 ipa_prop_read_section (file_data, data, len);
4820 /* After merging units, we can get a mismatch in argument counts.
4821 Decl merging might also have rendered parameter lists obsolete.
4822 Also compute called_with_variable_arg info. */
4824 void
4825 ipa_update_after_lto_read (void)
4827 ipa_check_create_node_params ();
4828 ipa_check_create_edge_args ();
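/* Write the aggregate replacement chain of NODE to OB. */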
4831 void
4832 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4834 int node_ref;
4835 unsigned int count = 0;
4836 lto_symtab_encoder_t encoder;
4837 struct ipa_agg_replacement_value *aggvals, *av;
4839 aggvals = ipa_get_agg_replacements_for_node (node);
4840 encoder = ob->decl_state->symtab_node_encoder;
4841 node_ref = lto_symtab_encoder_encode (encoder, node);
4842 streamer_write_uhwi (ob, node_ref);
4844 for (av = aggvals; av; av = av->next)
4845 count++;
4846 streamer_write_uhwi (ob, count);
4848 for (av = aggvals; av; av = av->next)
4850 struct bitpack_d bp;
4852 streamer_write_uhwi (ob, av->offset);
4853 streamer_write_uhwi (ob, av->index);
4854 stream_write_tree (ob, av->value, true);
4856 bp = bitpack_create (ob->main_stream);
4857 bp_pack_value (&bp, av->by_ref, 1);
4858 streamer_write_bitpack (&bp);
4862 /* Stream in the aggregate value replacement chain for NODE from IB. */
4864 static void
4865 read_agg_replacement_chain (struct lto_input_block *ib,
4866 struct cgraph_node *node,
4867 struct data_in *data_in)
4869 struct ipa_agg_replacement_value *aggvals = NULL;
4870 unsigned int count, i;
4872 count = streamer_read_uhwi (ib);
4873 for (i = 0; i < count; i++)
4875 struct ipa_agg_replacement_value *av;
4876 struct bitpack_d bp;
4878 av = ggc_alloc<ipa_agg_replacement_value> ();
4879 av->offset = streamer_read_uhwi (ib);
4880 av->index = streamer_read_uhwi (ib);
4881 av->value = stream_read_tree (ib, data_in);
4882 bp = streamer_read_bitpack (ib);
4883 av->by_ref = bp_unpack_value (&bp, 1);
4884 av->next = aggvals;
4885 aggvals = av;
4887 ipa_set_node_agg_value_chain (node, aggvals);
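/* Note that because the loop above prepends, a write/read round trip
   reverses the order of the chain; the values are keyed by index and
   offset, so the consumers here do not depend on that order.  */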
4890 /* Write all aggregate replacements for nodes in the current partition. */
4892 void
4893 ipa_prop_write_all_agg_replacement (void)
4895 struct cgraph_node *node;
4896 struct output_block *ob;
4897 unsigned int count = 0;
4898 lto_symtab_encoder_iterator lsei;
4899 lto_symtab_encoder_t encoder;
4901 if (!ipa_node_agg_replacements)
4902 return;
4904 ob = create_output_block (LTO_section_ipcp_transform);
4905 encoder = ob->decl_state->symtab_node_encoder;
4906 ob->symbol = NULL;
4907 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4908 lsei_next_function_in_partition (&lsei))
4910 node = lsei_cgraph_node (lsei);
4911 if (node->has_gimple_body_p ()
4912 && ipa_get_agg_replacements_for_node (node) != NULL)
4913 count++;
4916 streamer_write_uhwi (ob, count);
4918 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4919 lsei_next_function_in_partition (&lsei))
4921 node = lsei_cgraph_node (lsei);
4922 if (node->has_gimple_body_p ()
4923 && ipa_get_agg_replacements_for_node (node) != NULL)
4924 write_agg_replacement_chain (ob, node);
4926 streamer_write_char_stream (ob->main_stream, 0);
4927 produce_asm (ob, NULL);
4928 destroy_output_block (ob);
4931 /* Read the aggregate replacements section in file FILE_DATA of length LEN
4932 with data DATA. */
4934 static void
4935 read_replacements_section (struct lto_file_decl_data *file_data,
4936 const char *data,
4937 size_t len)
4939 const struct lto_function_header *header =
4940 (const struct lto_function_header *) data;
4941 const int cfg_offset = sizeof (struct lto_function_header);
4942 const int main_offset = cfg_offset + header->cfg_size;
4943 const int string_offset = main_offset + header->main_size;
4944 struct data_in *data_in;
4945 unsigned int i;
4946 unsigned int count;
4948 lto_input_block ib_main ((const char *) data + main_offset,
4949 header->main_size);
4951 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4952 header->string_size, vNULL);
4953 count = streamer_read_uhwi (&ib_main);
4955 for (i = 0; i < count; i++)
4957 unsigned int index;
4958 struct cgraph_node *node;
4959 lto_symtab_encoder_t encoder;
4961 index = streamer_read_uhwi (&ib_main);
4962 encoder = file_data->symtab_node_encoder;
4963 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4964 index));
4965 gcc_assert (node->definition);
4966 read_agg_replacement_chain (&ib_main, node, data_in);
4968 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4969 len);
4970 lto_data_in_delete (data_in);
4973 /* Read IPA-CP aggregate replacements. */
4975 void
4976 ipa_prop_read_all_agg_replacement (void)
4978 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4979 struct lto_file_decl_data *file_data;
4980 unsigned int j = 0;
4982 while ((file_data = file_data_vec[j++]))
4984 size_t len;
4985 const char *data = lto_get_section_data (file_data,
4986 LTO_section_ipcp_transform,
4987 NULL, &len);
4988 if (data)
4989 read_replacements_section (file_data, data, len);
4993 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4994 NODE. */
4996 static void
4997 adjust_agg_replacement_values (struct cgraph_node *node,
4998 struct ipa_agg_replacement_value *aggval)
5000 struct ipa_agg_replacement_value *v;
5001 int i, c = 0, d = 0, *adj;
5003 if (!node->clone.combined_args_to_skip)
5004 return;
5006 for (v = aggval; v; v = v->next)
5008 gcc_assert (v->index >= 0);
5009 if (c < v->index)
5010 c = v->index;
5012 c++;
5014 adj = XALLOCAVEC (int, c);
5015 for (i = 0; i < c; i++)
5016 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5018 adj[i] = -1;
5019 d++;
5021 else
5022 adj[i] = i - d;
5024 for (v = aggval; v; v = v->next)
5025 v->index = adj[v->index];
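/* E.g. (illustrative): with combined_args_to_skip = { 1 }, the mapping
   built above is 0 -> 0, 1 -> -1, 2 -> 1, 3 -> 2, so a replacement
   recorded for original parameter 2 now refers to parameter 1 of the
   clone.  */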
5028 /* Dominator walker driving the ipcp modification phase. */
5030 class ipcp_modif_dom_walker : public dom_walker
5032 public:
5033 ipcp_modif_dom_walker (struct func_body_info *fbi,
5034 vec<ipa_param_descriptor> descs,
5035 struct ipa_agg_replacement_value *av,
5036 bool *sc, bool *cc)
5037 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5038 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5040 virtual void before_dom_children (basic_block);
5042 private:
5043 struct func_body_info *m_fbi;
5044 vec<ipa_param_descriptor> m_descriptors;
5045 struct ipa_agg_replacement_value *m_aggval;
5046 bool *m_something_changed, *m_cfg_changed;
5049 void
5050 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5052 gimple_stmt_iterator gsi;
5053 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5055 struct ipa_agg_replacement_value *v;
5056 gimple stmt = gsi_stmt (gsi);
5057 tree rhs, val, t;
5058 HOST_WIDE_INT offset, size;
5059 int index;
5060 bool by_ref, vce;
5062 if (!gimple_assign_load_p (stmt))
5063 continue;
5064 rhs = gimple_assign_rhs1 (stmt);
5065 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5066 continue;
5068 vce = false;
5069 t = rhs;
5070 while (handled_component_p (t))
5072 /* A V_C_E can do things like convert an array of integers to one
5073 bigger integer and similar things that we do not handle below. */
5074 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5076 vce = true;
5077 break;
5079 t = TREE_OPERAND (t, 0);
5081 if (vce)
5082 continue;
5084 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5085 &offset, &size, &by_ref))
5086 continue;
5087 for (v = m_aggval; v; v = v->next)
5088 if (v->index == index
5089 && v->offset == offset)
5090 break;
5091 if (!v
5092 || v->by_ref != by_ref
5093 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5094 continue;
5096 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5097 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5099 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5100 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5101 else if (TYPE_SIZE (TREE_TYPE (rhs))
5102 == TYPE_SIZE (TREE_TYPE (v->value)))
5103 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5104 else
5106 if (dump_file)
5108 fprintf (dump_file, " const ");
5109 print_generic_expr (dump_file, v->value, 0);
5110 fprintf (dump_file, " can't be converted to type of ");
5111 print_generic_expr (dump_file, rhs, 0);
5112 fprintf (dump_file, "\n");
5114 continue;
5117 else
5118 val = v->value;
5120 if (dump_file && (dump_flags & TDF_DETAILS))
5122 fprintf (dump_file, "Modifying stmt:\n ");
5123 print_gimple_stmt (dump_file, stmt, 0, 0);
5125 gimple_assign_set_rhs_from_tree (&gsi, val);
5126 update_stmt (stmt);
5128 if (dump_file && (dump_flags & TDF_DETAILS))
5130 fprintf (dump_file, "into:\n ");
5131 print_gimple_stmt (dump_file, stmt, 0, 0);
5132 fprintf (dump_file, "\n");
5135 *m_something_changed = true;
5136 if (maybe_clean_eh_stmt (stmt)
5137 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5138 *m_cfg_changed = true;
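/* Illustrative effect of the walker (not taken from a real dump): given
   an aggregate replacement "0[32]=4" for the first parameter P, a load

     x_1 = P.f;   (f at bit offset 32, size matching the constant)

   is rewritten above into

     x_1 = 4;

   after which dead EH edges may need purging if the load could have
   thrown.  */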
5143 /* IPCP transformation phase doing propagation of aggregate values. */
5145 unsigned int
5146 ipcp_transform_function (struct cgraph_node *node)
5148 vec<ipa_param_descriptor> descriptors = vNULL;
5149 struct func_body_info fbi;
5150 struct ipa_agg_replacement_value *aggval;
5151 int param_count;
5152 bool cfg_changed = false, something_changed = false;
5154 gcc_checking_assert (cfun);
5155 gcc_checking_assert (current_function_decl);
5157 if (dump_file)
5158 fprintf (dump_file, "Modification phase of node %s/%i\n",
5159 node->name (), node->order);
5161 aggval = ipa_get_agg_replacements_for_node (node);
5162 if (!aggval)
5163 return 0;
5164 param_count = count_formal_params (node->decl);
5165 if (param_count == 0)
5166 return 0;
5167 adjust_agg_replacement_values (node, aggval);
5168 if (dump_file)
5169 ipa_dump_agg_replacement_values (dump_file, aggval);
5171 fbi.node = node;
5172 fbi.info = NULL;
5173 fbi.bb_infos = vNULL;
5174 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5175 fbi.param_count = param_count;
5176 fbi.aa_walked = 0;
5178 descriptors.safe_grow_cleared (param_count);
5179 ipa_populate_param_decls (node, descriptors);
5180 calculate_dominance_info (CDI_DOMINATORS);
5181 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5182 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5184 int i;
5185 struct ipa_bb_info *bi;
5186 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5187 free_ipa_bb_info (bi);
5188 fbi.bb_infos.release ();
5189 free_dominance_info (CDI_DOMINATORS);
5190 (*ipa_node_agg_replacements)[node->uid] = NULL;
5191 descriptors.release ();
5193 if (!something_changed)
5194 return 0;
5195 else if (cfg_changed)
5196 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5197 else
5198 return TODO_update_ssa_only_virtuals;