/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is only used when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}

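/* For illustration (an assumed example, not exercised by this file itself):
   a function compiled with

     __attribute__ ((optimize ("O0"))) void f (void) { ... }

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize set to zero, so
   the predicate above returns true and IPA-CP leaves the node alone.  */
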
/* Return index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump a description of the Ith formal parameter of the function
   corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be an unknown jump function, one representing no known
   information.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by the jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

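/* For illustration (a sketch of typical gimple, assumed rather than produced
   by this file): a VMT pointer store emitted by a constructor usually looks
   like

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   i.e. an assignment to a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P
   set, which is exactly the kind of statement the predicate above refuses
   to rule out.  */
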
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point CALL is
   invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}

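/* For example (illustrative): in a plain function

     void f (struct A *a) { a->foo (); }

   A is the default definition of the first parameter and F is neither a
   constructor nor a destructor, so unless the inline stack contains a cdtor,
   param_type_may_change_p returns false and the dynamic type of *A observed
   at function entry can be relied upon at the call.  */
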
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   the structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

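/* For instance (illustrative): if parameter A is addressable, its uses are
   loads from the PARM_DECL itself, as in

     a.0_2 = a;
     bar (a.0_2);

   For the load statement above, load_from_unmodified_param returns the index
   of A provided nothing between the function entry and that statement may
   have clobbered A.  */
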
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describe the parameters of the current function and FBI holds
   gathered function-body information (but it can be NULL), STMT is the load
   statement.  If the function returns true, *INDEX_P, *OFFSET_P and *BY_REF_P
   are filled with the parameter index, offset within the aggregate and whether
   it is a load from a value passed by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   struct S * p.1;

	   <bb 2>:
	   p.1_1 = p;
	   D.1867_2 = p.1_1->f;
	   D.1867_2 ();
	   gdp = &p;
	 */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
       iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
       # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
       D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
       return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

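/* For illustration (an assumption about the usual C++ ABI lowering, not
   something this file defines): a pointer to member function such as

     int (MyString::*f) (int);

   is commonly represented as a record with exactly two fields, roughly

     struct { int (*__pfn) (struct MyString *, int); long __delta; };

   which is the two-field shape the predicate above checks for.  */
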
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

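/* For example (illustrative): given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-rhs assignments back
   and returns a_1, stopping at a default definition or at the first
   statement that is not a simple copy.  */
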
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   an element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

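/* For example (illustrative, offsets in bits): with a list already
   describing [0, 32) and [64, 96), a query for [32, 64) returns the link
   between the two entries, a query for [64, 96) sets *ALREADY_THERE, and a
   query for [16, 48) overlaps the first entry only partially and yields
   NULL.  */
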
/* Build an aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}

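/* For example (an illustrative caller, not from this file): for

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backwards walk above records the stores to s.a and s.b, and the
   resulting aggregate jump function describes the two constants together
   with their offsets within S.  */
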
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

1896 /* Returns true iff T is an SSA_NAME defined by a statement. */
1898 static bool
1899 ipa_is_ssa_with_stmt_def (tree t)
1901 if (TREE_CODE (t) == SSA_NAME
1902 && !SSA_NAME_IS_DEFAULT_DEF (t))
1903 return true;
1904 else
1905 return false;
1908 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1909 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1910 indirect call graph edge. */
1912 static struct cgraph_edge *
1913 ipa_note_param_call (struct cgraph_node *node, int param_index,
1914 gcall *stmt)
1916 struct cgraph_edge *cs;
1918 cs = node->get_edge (stmt);
1919 cs->indirect_info->param_index = param_index;
1920 cs->indirect_info->agg_contents = 0;
1921 cs->indirect_info->member_ptr = 0;
1922 return cs;
1925 /* Analyze the CALL and examine uses of formal parameters of the caller
1926 FBI->node (described by FBI->info). Currently it checks
1928 whether the call calls a pointer that is a formal parameter and if so, the
1929 parameter is marked with the called flag and an indirect call graph edge
1930 describing the call is created. This is very simple for ordinary pointers
1931 represented in SSA but not-so-nice when it comes to member pointers. The
1932 ugly part of this function does nothing more than trying to match the
1933 pattern of such a call. An example of such a pattern is the gimple dump
1934 below, the call is on the last line:
1936 <bb 2>:
1937 f$__delta_5 = f.__delta;
1938 f$__pfn_24 = f.__pfn;
or
1941 <bb 2>:
1942 f$__delta_5 = MEM[(struct *)&f];
1943 f$__pfn_24 = MEM[(struct *)&f + 4B];
1945 and a few lines below:
1947 <bb 5>:
1948 D.2496_3 = (int) f$__pfn_24;
1949 D.2497_4 = D.2496_3 & 1;
1950 if (D.2497_4 != 0)
1951 goto <bb 3>;
1952 else
1953 goto <bb 4>;
1955 <bb 6>:
1956 D.2500_7 = (unsigned int) f$__delta_5;
1957 D.2501_8 = &S + D.2500_7;
1958 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1959 D.2503_10 = *D.2502_9;
1960 D.2504_12 = f$__pfn_24 + -1;
1961 D.2505_13 = (unsigned int) D.2504_12;
1962 D.2506_14 = D.2503_10 + D.2505_13;
1963 D.2507_15 = *D.2506_14;
1964 iftmp.11_16 = (String:: *) D.2507_15;
1966 <bb 7>:
1967 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1968 D.2500_19 = (unsigned int) f$__delta_5;
1969 D.2508_20 = &S + D.2500_19;
1970 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1972 Such patterns result from simple calls through a member pointer:
1974 int doprinting (int (MyString::* f)(int) const)
1976 MyString S ("somestring");
1978 return (S.*f)(4);
1981 Moreover, the function also looks for called pointers loaded from aggregates
1982 passed by value or reference. */
1984 static void
1985 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1986 tree target)
1988 struct ipa_node_params *info = fbi->info;
1989 HOST_WIDE_INT offset;
1990 bool by_ref;
1992 if (SSA_NAME_IS_DEFAULT_DEF (target))
1994 tree var = SSA_NAME_VAR (target);
1995 int index = ipa_get_param_decl_index (info, var);
1996 if (index >= 0)
1997 ipa_note_param_call (fbi->node, index, call);
1998 return;
2001 int index;
2002 gimple def = SSA_NAME_DEF_STMT (target);
2003 if (gimple_assign_single_p (def)
2004 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2005 gimple_assign_rhs1 (def), &index, &offset,
2006 NULL, &by_ref))
2008 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2009 cs->indirect_info->offset = offset;
2010 cs->indirect_info->agg_contents = 1;
2011 cs->indirect_info->by_ref = by_ref;
2012 return;
2015 /* Now we need to try to match the complex pattern of calling a member
2016 pointer. */
2017 if (gimple_code (def) != GIMPLE_PHI
2018 || gimple_phi_num_args (def) != 2
2019 || !POINTER_TYPE_P (TREE_TYPE (target))
2020 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2021 return;
2023 /* First, we need to check whether one of these is a load from a member
2024 pointer that is a parameter to this function. */
2025 tree n1 = PHI_ARG_DEF (def, 0);
2026 tree n2 = PHI_ARG_DEF (def, 1);
2027 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2028 return;
2029 gimple d1 = SSA_NAME_DEF_STMT (n1);
2030 gimple d2 = SSA_NAME_DEF_STMT (n2);
2032 tree rec;
2033 basic_block bb, virt_bb;
2034 basic_block join = gimple_bb (def);
2035 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2037 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2038 return;
2040 bb = EDGE_PRED (join, 0)->src;
2041 virt_bb = gimple_bb (d2);
2043 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2045 bb = EDGE_PRED (join, 1)->src;
2046 virt_bb = gimple_bb (d1);
2048 else
2049 return;
2051 /* Second, we need to check that the basic blocks are laid out in the way
2052 corresponding to the pattern. */
2054 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2055 || single_pred (virt_bb) != bb
2056 || single_succ (virt_bb) != join)
2057 return;
2059 /* Third, let's see that the branching is done depending on the least
2060 significant bit of the pfn. */
2062 gimple branch = last_stmt (bb);
2063 if (!branch || gimple_code (branch) != GIMPLE_COND)
2064 return;
2066 if ((gimple_cond_code (branch) != NE_EXPR
2067 && gimple_cond_code (branch) != EQ_EXPR)
2068 || !integer_zerop (gimple_cond_rhs (branch)))
2069 return;
2071 tree cond = gimple_cond_lhs (branch);
2072 if (!ipa_is_ssa_with_stmt_def (cond))
2073 return;
2075 def = SSA_NAME_DEF_STMT (cond);
2076 if (!is_gimple_assign (def)
2077 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2078 || !integer_onep (gimple_assign_rhs2 (def)))
2079 return;
2081 cond = gimple_assign_rhs1 (def);
2082 if (!ipa_is_ssa_with_stmt_def (cond))
2083 return;
2085 def = SSA_NAME_DEF_STMT (cond);
2087 if (is_gimple_assign (def)
2088 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2090 cond = gimple_assign_rhs1 (def);
2091 if (!ipa_is_ssa_with_stmt_def (cond))
2092 return;
2093 def = SSA_NAME_DEF_STMT (cond);
2096 tree rec2;
2097 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2098 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2099 == ptrmemfunc_vbit_in_delta),
2100 NULL);
2101 if (rec != rec2)
2102 return;
2104 index = ipa_get_param_decl_index (info, rec);
2105 if (index >= 0
2106 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2108 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2109 cs->indirect_info->offset = offset;
2110 cs->indirect_info->agg_contents = 1;
2111 cs->indirect_info->member_ptr = 1;
2114 return;
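/* As a concrete illustration of the aggregate case handled above (a
   hypothetical sketch, not from any particular testcase), consider

     struct S { void (*callback) (int); };
     static void f (struct S *s) { s->callback (4); }

   The load of s->callback is recognized by ipa_load_from_parm_agg_1, so the
   indirect edge is noted with agg_contents set, by_ref true and the offset
   of the callback field, allowing callers that pass a known structure to
   have the call made direct later.  */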
2117 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2118 object referenced in the expression is a formal parameter of the caller
2119 FBI->node (described by FBI->info), create a call note for the
2120 statement. */
2122 static void
2123 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2124 gcall *call, tree target)
2126 tree obj = OBJ_TYPE_REF_OBJECT (target);
2127 int index;
2128 HOST_WIDE_INT anc_offset;
2130 if (!flag_devirtualize)
2131 return;
2133 if (TREE_CODE (obj) != SSA_NAME)
2134 return;
2136 struct ipa_node_params *info = fbi->info;
2137 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2139 struct ipa_jump_func jfunc;
2140 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2141 return;
2143 anc_offset = 0;
2144 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2145 gcc_assert (index >= 0);
2146 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2147 call, &jfunc))
2148 return;
2150 else
2152 struct ipa_jump_func jfunc;
2153 gimple stmt = SSA_NAME_DEF_STMT (obj);
2154 tree expr;
2156 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2157 if (!expr)
2158 return;
2159 index = ipa_get_param_decl_index (info,
2160 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2161 gcc_assert (index >= 0);
2162 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2163 call, &jfunc, anc_offset))
2164 return;
2167 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2168 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2169 ii->offset = anc_offset;
2170 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2171 ii->otr_type = obj_type_ref_class (target);
2172 ii->polymorphic = 1;
2175 /* Analyze the call statement CALL to see whether and how it utilizes formal
2176 parameters of the caller, described by FBI. */
2179 static void
2180 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2182 tree target = gimple_call_fn (call);
2184 if (!target
2185 || (TREE_CODE (target) != SSA_NAME
2186 && !virtual_method_call_p (target)))
2187 return;
2189 struct cgraph_edge *cs = fbi->node->get_edge (call);
2190 /* If we previously turned the call into a direct call, there is
2191 no need to analyze. */
2192 if (cs && !cs->indirect_unknown_callee)
2193 return;
2195 if (cs->indirect_info->polymorphic && flag_devirtualize)
2197 tree instance;
2198 tree target = gimple_call_fn (call);
2199 ipa_polymorphic_call_context context (current_function_decl,
2200 target, call, &instance);
2202 gcc_checking_assert (cs->indirect_info->otr_type
2203 == obj_type_ref_class (target));
2204 gcc_checking_assert (cs->indirect_info->otr_token
2205 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2207 cs->indirect_info->vptr_changed
2208 = !context.get_dynamic_type (instance,
2209 OBJ_TYPE_REF_OBJECT (target),
2210 obj_type_ref_class (target), call);
2211 cs->indirect_info->context = context;
2214 if (TREE_CODE (target) == SSA_NAME)
2215 ipa_analyze_indirect_call_uses (fbi, call, target);
2216 else if (virtual_method_call_p (target))
2217 ipa_analyze_virtual_call_uses (fbi, call, target);
2221 /* Analyze the call statement STMT with respect to formal parameters
2222 (described in FBI->info) of the caller given by FBI->node. Currently it
2223 only checks whether formal parameters are called. */
2225 static void
2226 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2228 if (is_gimple_call (stmt))
2229 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2232 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2233 If OP is a parameter declaration, mark it as used in the info structure
2234 passed in DATA. */
2236 static bool
2237 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2239 struct ipa_node_params *info = (struct ipa_node_params *) data;
2241 op = get_base_address (op);
2242 if (op
2243 && TREE_CODE (op) == PARM_DECL)
2245 int index = ipa_get_param_decl_index (info, op);
2246 gcc_assert (index >= 0);
2247 ipa_set_param_used (info, index, true);
2250 return false;
2253 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2254 the findings in various structures of the associated ipa_node_params
2255 structure, such as parameter flags, notes etc. FBI holds various data about
2256 the function being analyzed. */
2258 static void
2259 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2261 gimple_stmt_iterator gsi;
2262 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2264 gimple stmt = gsi_stmt (gsi);
2266 if (is_gimple_debug (stmt))
2267 continue;
2269 ipa_analyze_stmt_uses (fbi, stmt);
2270 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2271 visit_ref_for_mod_analysis,
2272 visit_ref_for_mod_analysis,
2273 visit_ref_for_mod_analysis);
2275 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2276 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2277 visit_ref_for_mod_analysis,
2278 visit_ref_for_mod_analysis,
2279 visit_ref_for_mod_analysis);
2282 /* Calculate controlled uses of parameters of NODE. */
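/* In the terms used below, a use of a parameter is "controlled" when it
   occurs in a call statement, so that all uses can be described by jump
   functions.  For instance, in the hypothetical

     static int wrap (int (*fn) (void)) { return fn () + fn (); }

   FN would end up with two controlled uses, whereas storing FN to memory or
   comparing it would force the count to IPA_UNDESCRIBED_USE.  */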
2284 static void
2285 ipa_analyze_controlled_uses (struct cgraph_node *node)
2287 struct ipa_node_params *info = IPA_NODE_REF (node);
2289 for (int i = 0; i < ipa_get_param_count (info); i++)
2291 tree parm = ipa_get_param (info, i);
2292 int controlled_uses = 0;
2294 /* For SSA regs see if parameter is used. For non-SSA we compute
2295 the flag during modification analysis. */
2296 if (is_gimple_reg (parm))
2298 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2299 parm);
2300 if (ddef && !has_zero_uses (ddef))
2302 imm_use_iterator imm_iter;
2303 use_operand_p use_p;
2305 ipa_set_param_used (info, i, true);
2306 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2307 if (!is_gimple_call (USE_STMT (use_p)))
2309 if (!is_gimple_debug (USE_STMT (use_p)))
2311 controlled_uses = IPA_UNDESCRIBED_USE;
2312 break;
2315 else
2316 controlled_uses++;
2318 else
2319 controlled_uses = 0;
2321 else
2322 controlled_uses = IPA_UNDESCRIBED_USE;
2323 ipa_set_controlled_uses (info, i, controlled_uses);
2327 /* Free stuff in BI. */
2329 static void
2330 free_ipa_bb_info (struct ipa_bb_info *bi)
2332 bi->cg_edges.release ();
2333 bi->param_aa_statuses.release ();
2336 /* Dominator walker driving the analysis. */
2338 class analysis_dom_walker : public dom_walker
2340 public:
2341 analysis_dom_walker (struct func_body_info *fbi)
2342 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2344 virtual void before_dom_children (basic_block);
2346 private:
2347 struct func_body_info *m_fbi;
2350 void
2351 analysis_dom_walker::before_dom_children (basic_block bb)
2353 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2354 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2357 /* Initialize the array describing properties of formal parameters
2358 of NODE, analyze their uses and compute jump functions associated
2359 with actual arguments of calls from within NODE. */
2361 void
2362 ipa_analyze_node (struct cgraph_node *node)
2364 struct func_body_info fbi;
2365 struct ipa_node_params *info;
2367 ipa_check_create_node_params ();
2368 ipa_check_create_edge_args ();
2369 info = IPA_NODE_REF (node);
2371 if (info->analysis_done)
2372 return;
2373 info->analysis_done = 1;
2375 if (ipa_func_spec_opts_forbid_analysis_p (node))
2377 for (int i = 0; i < ipa_get_param_count (info); i++)
2379 ipa_set_param_used (info, i, true);
2380 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2382 return;
2385 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2386 push_cfun (func);
2387 calculate_dominance_info (CDI_DOMINATORS);
2388 ipa_initialize_node_params (node);
2389 ipa_analyze_controlled_uses (node);
2391 fbi.node = node;
2392 fbi.info = IPA_NODE_REF (node);
2393 fbi.bb_infos = vNULL;
2394 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2395 fbi.param_count = ipa_get_param_count (info);
2396 fbi.aa_walked = 0;
2398 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2400 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2401 bi->cg_edges.safe_push (cs);
2404 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2406 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2407 bi->cg_edges.safe_push (cs);
2410 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2412 int i;
2413 struct ipa_bb_info *bi;
2414 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2415 free_ipa_bb_info (bi);
2416 fbi.bb_infos.release ();
2417 free_dominance_info (CDI_DOMINATORS);
2418 pop_cfun ();
2421 /* Update the jump functions associated with call graph edge E when the call
2422 graph edge CS is being inlined, assuming that E->caller is already (possibly
2423 indirectly) inlined into CS->callee and that E has not been inlined. */
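/* A simplified sketch of the composition performed below: assume A calls B
   passing A's formal 2 as an argument, and B passes its own formal 0 on to
   C, so the B->C jump function is a simple pass-through of formal 0.  When
   the A->B edge CS is inlined, the function for the surviving A-rooted edge
   to C must become a pass-through of A's formal 2, i.e. each jump function
   of E is composed with the corresponding one found in TOP (the argument
   description of CS).  */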
2425 static void
2426 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2427 struct cgraph_edge *e)
2429 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2430 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2431 int count = ipa_get_cs_argument_count (args);
2432 int i;
2434 for (i = 0; i < count; i++)
2436 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2437 struct ipa_polymorphic_call_context *dst_ctx
2438 = ipa_get_ith_polymorhic_call_context (args, i);
2440 if (dst->type == IPA_JF_ANCESTOR)
2442 struct ipa_jump_func *src;
2443 int dst_fid = dst->value.ancestor.formal_id;
2444 struct ipa_polymorphic_call_context *src_ctx
2445 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2447 /* A variable number of arguments can cause havoc if we try to access
2448 an argument that does not exist on the inlined edge. So make sure we
2449 don't. */
2450 if (dst_fid >= ipa_get_cs_argument_count (top))
2452 ipa_set_jf_unknown (dst);
2453 continue;
2456 src = ipa_get_ith_jump_func (top, dst_fid);
2458 if (src_ctx && !src_ctx->useless_p ())
2460 struct ipa_polymorphic_call_context ctx = *src_ctx;
2462 /* TODO: Make type preserved safe WRT contexts. */
2463 if (!ipa_get_jf_ancestor_type_preserved (dst))
2464 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2465 ctx.offset_by (dst->value.ancestor.offset);
2466 if (!ctx.useless_p ())
2468 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2469 count);
2470 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2472 dst_ctx->combine_with (ctx);
2475 if (src->agg.items
2476 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2478 struct ipa_agg_jf_item *item;
2479 int j;
2481 /* Currently we do not produce clobber aggregate jump functions,
2482 replace with merging when we do. */
2483 gcc_assert (!dst->agg.items);
2485 dst->agg.items = vec_safe_copy (src->agg.items);
2486 dst->agg.by_ref = src->agg.by_ref;
2487 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2488 item->offset -= dst->value.ancestor.offset;
2491 if (src->type == IPA_JF_PASS_THROUGH
2492 && src->value.pass_through.operation == NOP_EXPR)
2494 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2495 dst->value.ancestor.agg_preserved &=
2496 src->value.pass_through.agg_preserved;
2498 else if (src->type == IPA_JF_ANCESTOR)
2500 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2501 dst->value.ancestor.offset += src->value.ancestor.offset;
2502 dst->value.ancestor.agg_preserved &=
2503 src->value.ancestor.agg_preserved;
2505 else
2506 ipa_set_jf_unknown (dst);
2508 else if (dst->type == IPA_JF_PASS_THROUGH)
2510 struct ipa_jump_func *src;
2511 /* We must check the range because of calls with a variable number of
2512 arguments, and we cannot combine jump functions with operations. */
2513 if (dst->value.pass_through.operation == NOP_EXPR
2514 && (dst->value.pass_through.formal_id
2515 < ipa_get_cs_argument_count (top)))
2517 int dst_fid = dst->value.pass_through.formal_id;
2518 src = ipa_get_ith_jump_func (top, dst_fid);
2519 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2520 struct ipa_polymorphic_call_context *src_ctx
2521 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2523 if (src_ctx && !src_ctx->useless_p ())
2525 struct ipa_polymorphic_call_context ctx = *src_ctx;
2527 /* TODO: Make type preserved safe WRT contexts. */
2528 if (!ipa_get_jf_pass_through_type_preserved (dst))
2529 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2530 if (!ctx.useless_p ())
2532 if (!dst_ctx)
2534 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2535 count);
2536 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2538 dst_ctx->combine_with (ctx);
2541 switch (src->type)
2543 case IPA_JF_UNKNOWN:
2544 ipa_set_jf_unknown (dst);
2545 break;
2546 case IPA_JF_CONST:
2547 ipa_set_jf_cst_copy (dst, src);
2548 break;
2550 case IPA_JF_PASS_THROUGH:
2552 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2553 enum tree_code operation;
2554 operation = ipa_get_jf_pass_through_operation (src);
2556 if (operation == NOP_EXPR)
2558 bool agg_p;
2559 agg_p = dst_agg_p
2560 && ipa_get_jf_pass_through_agg_preserved (src);
2561 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2563 else
2565 tree operand = ipa_get_jf_pass_through_operand (src);
2566 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2567 operation);
2569 break;
2571 case IPA_JF_ANCESTOR:
2573 bool agg_p;
2574 agg_p = dst_agg_p
2575 && ipa_get_jf_ancestor_agg_preserved (src);
2576 ipa_set_ancestor_jf (dst,
2577 ipa_get_jf_ancestor_offset (src),
2578 ipa_get_jf_ancestor_formal_id (src),
2579 agg_p);
2580 break;
2582 default:
2583 gcc_unreachable ();
2586 if (src->agg.items
2587 && (dst_agg_p || !src->agg.by_ref))
2589 /* Currently we do not produce clobber aggregate jump
2590 functions, replace with merging when we do. */
2591 gcc_assert (!dst->agg.items);
2593 dst->agg.by_ref = src->agg.by_ref;
2594 dst->agg.items = vec_safe_copy (src->agg.items);
2597 else
2598 ipa_set_jf_unknown (dst);
2603 /* If TARGET is an addr_expr of a function declaration, make it the
2604 (SPECULATIVE) destination of an indirect edge IE and return the edge.
2605 Otherwise, return NULL. */
2607 struct cgraph_edge *
2608 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2609 bool speculative)
2611 struct cgraph_node *callee;
2612 struct inline_edge_summary *es = inline_edge_summary (ie);
2613 bool unreachable = false;
2615 if (TREE_CODE (target) == ADDR_EXPR)
2616 target = TREE_OPERAND (target, 0);
2617 if (TREE_CODE (target) != FUNCTION_DECL)
2619 target = canonicalize_constructor_val (target, NULL);
2620 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2622 if (ie->indirect_info->member_ptr)
2623 /* Member pointer call that goes through a VMT lookup. */
2624 return NULL;
2626 if (dump_enabled_p ())
2628 location_t loc = gimple_location_safe (ie->call_stmt);
2629 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2630 "discovered direct call to non-function in %s/%i, "
2631 "making it __builtin_unreachable\n",
2632 ie->caller->name (), ie->caller->order);
2635 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2636 callee = cgraph_node::get_create (target);
2637 unreachable = true;
2639 else
2640 callee = cgraph_node::get (target);
2642 else
2643 callee = cgraph_node::get (target);
2645 /* Because may-edges are not explicitly represented and the vtable may be
2646 external, we may create the first reference to the object in the unit. */
2647 if (!callee || callee->global.inlined_to)
2650 /* We had better make sure we can refer to it.
2651 In the case of static functions we are out of luck, since we have already
2652 removed the body. In the case of public functions we may or may
2653 not introduce the reference. */
2654 if (!canonicalize_constructor_val (target, NULL)
2655 || !TREE_PUBLIC (target))
2657 if (dump_file)
2658 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2659 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2660 xstrdup_for_dump (ie->caller->name ()),
2661 ie->caller->order,
2662 xstrdup_for_dump (ie->callee->name ()),
2663 ie->callee->order);
2664 return NULL;
2666 callee = cgraph_node::get_create (target);
2669 /* If the edge is already speculative, check whether the speculation agrees
with the discovered target; in either case keep the existing edge. */
2670 if (speculative && ie->speculative)
2672 struct cgraph_edge *e2;
2673 struct ipa_ref *ref;
2674 ie->speculative_call_info (e2, ie, ref);
2675 if (e2->callee->ultimate_alias_target ()
2676 != callee->ultimate_alias_target ())
2678 if (dump_file)
2679 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2680 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2681 xstrdup_for_dump (ie->caller->name ()),
2682 ie->caller->order,
2683 xstrdup_for_dump (callee->name ()),
2684 callee->order,
2685 xstrdup_for_dump (e2->callee->name ()),
2686 e2->callee->order);
2688 else
2690 if (dump_file)
2691 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2692 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2693 xstrdup_for_dump (ie->caller->name ()),
2694 ie->caller->order,
2695 xstrdup_for_dump (callee->name ()),
2696 callee->order);
2698 return NULL;
2701 if (!dbg_cnt (devirt))
2702 return NULL;
2704 ipa_check_create_node_params ();
2706 /* We cannot make edges to inline clones. It is a bug if someone removed
2707 the cgraph node too early. */
2708 gcc_assert (!callee->global.inlined_to);
2710 if (dump_file && !unreachable)
2712 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2713 "(%s/%i -> %s/%i), for stmt ",
2714 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2715 speculative ? "speculative" : "known",
2716 xstrdup_for_dump (ie->caller->name ()),
2717 ie->caller->order,
2718 xstrdup_for_dump (callee->name ()),
2719 callee->order);
2720 if (ie->call_stmt)
2721 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2722 else
2723 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2725 if (dump_enabled_p ())
2727 location_t loc = gimple_location_safe (ie->call_stmt);
2729 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2730 "converting indirect call in %s to direct call to %s\n",
2731 ie->caller->name (), callee->name ());
2733 if (!speculative)
2734 ie = ie->make_direct (callee);
2735 else
2737 if (!callee->can_be_discarded_p ())
2739 cgraph_node *alias;
2740 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2741 if (alias)
2742 callee = alias;
2744 ie = ie->make_speculative
2745 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2747 es = inline_edge_summary (ie);
2748 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2749 - eni_size_weights.call_cost);
2750 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2751 - eni_time_weights.call_cost);
2753 return ie;
2756 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2757 return NULL if there is none. BY_REF specifies whether the value has to
2758 be passed by reference or by value. */
2760 tree
2761 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2762 HOST_WIDE_INT offset, bool by_ref)
2764 struct ipa_agg_jf_item *item;
2765 int i;
2767 if (by_ref != agg->by_ref)
2768 return NULL;
2770 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2771 if (item->offset == offset)
2773 /* Currently we do not have clobber values, return NULL for them once
2774 we do. */
2775 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2776 return item->value;
2778 return NULL;
2781 /* Remove a reference to SYMBOL from the list of references of a node given by
2782 reference description RDESC. Return true if the reference has been
2783 successfully found and removed. */
2785 static bool
2786 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2788 struct ipa_ref *to_del;
2789 struct cgraph_edge *origin;
2791 origin = rdesc->cs;
2792 if (!origin)
2793 return false;
2794 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2795 origin->lto_stmt_uid);
2796 if (!to_del)
2797 return false;
2799 to_del->remove_reference ();
2800 if (dump_file)
2801 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2802 xstrdup_for_dump (origin->caller->name ()),
2803 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2804 return true;
2807 /* If JFUNC has a reference description with refcount different from
2808 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2809 NULL. JFUNC must be a constant jump function. */
2811 static struct ipa_cst_ref_desc *
2812 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2814 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2815 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2816 return rdesc;
2817 else
2818 return NULL;
2821 /* If the value of constant jump function JFUNC is an address of a function
2822 declaration, return the associated call graph node. Otherwise return
2823 NULL. */
2825 static cgraph_node *
2826 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2828 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2829 tree cst = ipa_get_jf_constant (jfunc);
2830 if (TREE_CODE (cst) != ADDR_EXPR
2831 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2832 return NULL;
2834 return cgraph_node::get (TREE_OPERAND (cst, 0));
2838 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2839 refcount and, if it hits zero, remove the reference to the described symbol
2840 from the caller of the edge specified in the rdesc. Return false if either
2841 the symbol or the reference could not be found, otherwise return true. */
2843 static bool
2844 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2846 struct ipa_cst_ref_desc *rdesc;
2847 if (jfunc->type == IPA_JF_CONST
2848 && (rdesc = jfunc_rdesc_usable (jfunc))
2849 && --rdesc->refcount == 0)
2851 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2852 if (!symbol)
2853 return false;
2855 return remove_described_reference (symbol, rdesc);
2857 return true;
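/* As an illustration of the refcounting above (a hedged sketch): if a
   caller contains

     helper (compute, 7);

   the constant jump function built for &compute carries an rdesc whose
   refcount tracks how many described uses of the reference remain; once
   they all disappear (e.g. the indirect call inside helper was made
   direct), the ipa_ref from the caller to compute can be removed.  */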
2860 /* Try to find a destination for indirect edge IE that corresponds to a simple
2861 call or a call of a member function pointer and where the destination is a
2862 pointer formal parameter described by jump function JFUNC. If it can be
2863 determined, return the newly direct edge, otherwise return NULL.
2864 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
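/* For instance (a sketch): when the jump function for the called pointer is
   the constant &foo, ipa_value_from_jfunc below yields that ADDR_EXPR and
   the edge becomes a direct call to foo; in the agg_contents case the
   constant is instead looked up at the recorded offset within the aggregate
   jump function.  */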
2866 static struct cgraph_edge *
2867 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2868 struct ipa_jump_func *jfunc,
2869 struct ipa_node_params *new_root_info)
2871 struct cgraph_edge *cs;
2872 tree target;
2873 bool agg_contents = ie->indirect_info->agg_contents;
2875 if (ie->indirect_info->agg_contents)
2876 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2877 ie->indirect_info->offset,
2878 ie->indirect_info->by_ref);
2879 else
2880 target = ipa_value_from_jfunc (new_root_info, jfunc);
2881 if (!target)
2882 return NULL;
2883 cs = ipa_make_edge_direct_to_target (ie, target);
2885 if (cs && !agg_contents)
2887 bool ok;
2888 gcc_checking_assert (cs->callee
2889 && (cs != ie
2890 || jfunc->type != IPA_JF_CONST
2891 || !cgraph_node_for_jfunc (jfunc)
2892 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2893 ok = try_decrement_rdesc_refcount (jfunc);
2894 gcc_checking_assert (ok);
2897 return cs;
2900 /* Return the target to be used in cases of impossible devirtualization. IE
2901 and TARGET (the latter can be NULL) are dumped when dumping is enabled. */
2903 tree
2904 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2906 if (dump_file)
2908 if (target)
2909 fprintf (dump_file,
2910 "Type inconsistent devirtualization: %s/%i->%s\n",
2911 ie->caller->name (), ie->caller->order,
2912 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2913 else
2914 fprintf (dump_file,
2915 "No devirtualization target in %s/%i\n",
2916 ie->caller->name (), ie->caller->order);
2918 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2919 cgraph_node::get_create (new_target);
2920 return new_target;
2923 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2924 call based on a formal parameter which is described by jump function JFUNC
2925 and if it can be determined, make it direct and return the direct edge.
2926 Otherwise, return NULL. CTX describes the polymorphic context that the
2927 parameter the call is based on brings along with it. */
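/* For example (a hypothetical sketch): for an edge representing

     obj->foo ();

   where the combined polymorphic context proves OBJ is of type Derived and
   its dynamic type cannot change, the set of possible targets is final and
   has a single element, Derived::foo, so the call is made direct; when the
   evidence is weaker (e.g. the vptr may have changed), only a speculative
   edge is created.  */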
2929 static struct cgraph_edge *
2930 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2931 struct ipa_jump_func *jfunc,
2932 struct ipa_polymorphic_call_context ctx)
2934 tree target = NULL;
2935 bool speculative = false;
2937 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2938 return NULL;
2940 gcc_assert (!ie->indirect_info->by_ref);
2942 /* Try to do lookup via known virtual table pointer value. */
2943 if (!ie->indirect_info->vptr_changed
2944 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2946 tree vtable;
2947 unsigned HOST_WIDE_INT offset;
2948 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2949 ie->indirect_info->offset,
2950 true);
2951 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2953 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2954 vtable, offset);
2955 if (t)
2957 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2958 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2959 || !possible_polymorphic_call_target_p
2960 (ie, cgraph_node::get (t)))
2962 /* Do not speculate builtin_unreachable, it is stupid! */
2963 if (!ie->indirect_info->vptr_changed)
2964 target = ipa_impossible_devirt_target (ie, target);
2966 else
2968 target = t;
2969 speculative = ie->indirect_info->vptr_changed;
2975 ipa_polymorphic_call_context ie_context (ie);
2976 vec <cgraph_node *>targets;
2977 bool final;
2979 ctx.offset_by (ie->indirect_info->offset);
2980 if (ie->indirect_info->vptr_changed)
2981 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2982 ie->indirect_info->otr_type);
2983 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2984 targets = possible_polymorphic_call_targets
2985 (ie->indirect_info->otr_type,
2986 ie->indirect_info->otr_token,
2987 ctx, &final);
2988 if (final && targets.length () <= 1)
2990 if (targets.length () == 1)
2991 target = targets[0]->decl;
2992 else
2993 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2995 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2996 && !ie->speculative && ie->maybe_hot_p ())
2998 cgraph_node *n;
2999 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3000 ie->indirect_info->otr_token,
3001 ie->indirect_info->context);
3002 if (n)
3004 target = n->decl;
3005 speculative = true;
3009 if (target)
3011 if (!possible_polymorphic_call_target_p
3012 (ie, cgraph_node::get_create (target)))
3014 if (speculative)
3015 return NULL;
3016 target = ipa_impossible_devirt_target (ie, target);
3018 return ipa_make_edge_direct_to_target (ie, target, speculative);
3020 else
3021 return NULL;
3024 /* Update the param called notes associated with NODE when CS is being inlined,
3025 assuming NODE is (potentially indirectly) inlined into CS->callee.
3026 Moreover, if the callee is discovered to be constant, create a new cgraph
3027 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3028 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3030 static bool
3031 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3032 struct cgraph_node *node,
3033 vec<cgraph_edge *> *new_edges)
3035 struct ipa_edge_args *top;
3036 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3037 struct ipa_node_params *new_root_info;
3038 bool res = false;
3040 ipa_check_create_edge_args ();
3041 top = IPA_EDGE_REF (cs);
3042 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3043 ? cs->caller->global.inlined_to
3044 : cs->caller);
3046 for (ie = node->indirect_calls; ie; ie = next_ie)
3048 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3049 struct ipa_jump_func *jfunc;
3050 int param_index;
3052 next_ie = ie->next_callee;
3054 if (ici->param_index == -1)
3055 continue;
3057 /* We must check the range because of calls with a variable number of arguments. */
3058 if (ici->param_index >= ipa_get_cs_argument_count (top))
3060 ici->param_index = -1;
3061 continue;
3064 param_index = ici->param_index;
3065 jfunc = ipa_get_ith_jump_func (top, param_index);
3067 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3068 new_direct_edge = NULL;
3069 else if (ici->polymorphic)
3071 ipa_polymorphic_call_context ctx;
3072 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3073 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3075 else
3076 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3077 new_root_info);
3078 /* If speculation was removed, then we need to do nothing. */
3079 if (new_direct_edge && new_direct_edge != ie)
3081 new_direct_edge->indirect_inlining_edge = 1;
3082 top = IPA_EDGE_REF (cs);
3083 res = true;
3085 else if (new_direct_edge)
3087 new_direct_edge->indirect_inlining_edge = 1;
3088 if (new_direct_edge->call_stmt)
3089 new_direct_edge->call_stmt_cannot_inline_p
3090 = !gimple_check_call_matching_types (
3091 new_direct_edge->call_stmt,
3092 new_direct_edge->callee->decl, false);
3093 if (new_edges)
3095 new_edges->safe_push (new_direct_edge);
3096 res = true;
3098 top = IPA_EDGE_REF (cs);
3100 else if (jfunc->type == IPA_JF_PASS_THROUGH
3101 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3103 if ((ici->agg_contents
3104 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3105 || (ici->polymorphic
3106 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3107 ici->param_index = -1;
3108 else
3109 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3111 else if (jfunc->type == IPA_JF_ANCESTOR)
3113 if ((ici->agg_contents
3114 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3115 || (ici->polymorphic
3116 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3117 ici->param_index = -1;
3118 else
3120 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3121 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3124 else
3125 /* Either we can find a destination for this edge now or never. */
3126 ici->param_index = -1;
3129 return res;
3132 /* Recursively traverse subtree of NODE (including node) made of inlined
3133 cgraph_edges when CS has been inlined and invoke
3134 update_indirect_edges_after_inlining on all nodes and
3135 update_jump_functions_after_inlining on all non-inlined edges that lead out
3136 of this subtree. Newly discovered indirect edges will be added to
3137 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3138 created. */
3140 static bool
3141 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3142 struct cgraph_node *node,
3143 vec<cgraph_edge *> *new_edges)
3145 struct cgraph_edge *e;
3146 bool res;
3148 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3150 for (e = node->callees; e; e = e->next_callee)
3151 if (!e->inline_failed)
3152 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3153 else
3154 update_jump_functions_after_inlining (cs, e);
3155 for (e = node->indirect_calls; e; e = e->next_callee)
3156 update_jump_functions_after_inlining (cs, e);
3158 return res;
3161 /* Combine two controlled uses counts as done during inlining. */
3163 static int
3164 combine_controlled_uses_counters (int c, int d)
3166 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3167 return IPA_UNDESCRIBED_USE;
3168 else
3169 return c + d - 1;
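/* E.g. combining C = 3 and D = 2 yields 4: one of the C uses is the act of
   passing the value at the call site being inlined, and after inlining it
   is replaced by the D uses inside the inlined body, hence C + D - 1.  If
   either count is IPA_UNDESCRIBED_USE, the combination is undescribed
   too.  */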
3172 /* Propagate the number of controlled uses from CS->callee to the new root of
3173 the tree of inlined nodes. */
3175 static void
3176 propagate_controlled_uses (struct cgraph_edge *cs)
3178 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3179 struct cgraph_node *new_root = cs->caller->global.inlined_to
3180 ? cs->caller->global.inlined_to : cs->caller;
3181 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3182 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3183 int count, i;
3185 count = MIN (ipa_get_cs_argument_count (args),
3186 ipa_get_param_count (old_root_info));
3187 for (i = 0; i < count; i++)
3189 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3190 struct ipa_cst_ref_desc *rdesc;
3192 if (jf->type == IPA_JF_PASS_THROUGH)
3194 int src_idx, c, d;
3195 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3196 c = ipa_get_controlled_uses (new_root_info, src_idx);
3197 d = ipa_get_controlled_uses (old_root_info, i);
3199 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3200 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3201 c = combine_controlled_uses_counters (c, d);
3202 ipa_set_controlled_uses (new_root_info, src_idx, c);
3203 if (c == 0 && new_root_info->ipcp_orig_node)
3205 struct cgraph_node *n;
3206 struct ipa_ref *ref;
3207 tree t = new_root_info->known_csts[src_idx];
3209 if (t && TREE_CODE (t) == ADDR_EXPR
3210 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3211 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3212 && (ref = new_root->find_reference (n, NULL, 0)))
3214 if (dump_file)
3215 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3216 "reference from %s/%i to %s/%i.\n",
3217 xstrdup_for_dump (new_root->name ()),
3218 new_root->order,
3219 xstrdup_for_dump (n->name ()), n->order);
3220 ref->remove_reference ();
3224 else if (jf->type == IPA_JF_CONST
3225 && (rdesc = jfunc_rdesc_usable (jf)))
3227 int d = ipa_get_controlled_uses (old_root_info, i);
3228 int c = rdesc->refcount;
3229 rdesc->refcount = combine_controlled_uses_counters (c, d);
3230 if (rdesc->refcount == 0)
3232 tree cst = ipa_get_jf_constant (jf);
3233 struct cgraph_node *n;
3234 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3235 && TREE_CODE (TREE_OPERAND (cst, 0))
3236 == FUNCTION_DECL);
3237 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3238 if (n)
3240 struct cgraph_node *clone;
3241 bool ok;
3242 ok = remove_described_reference (n, rdesc);
3243 gcc_checking_assert (ok);
3245 clone = cs->caller;
3246 while (clone->global.inlined_to
3247 && clone != rdesc->cs->caller
3248 && IPA_NODE_REF (clone)->ipcp_orig_node)
3250 struct ipa_ref *ref;
3251 ref = clone->find_reference (n, NULL, 0);
3252 if (ref)
3254 if (dump_file)
3255 fprintf (dump_file, "ipa-prop: Removing "
3256 "cloning-created reference "
3257 "from %s/%i to %s/%i.\n",
3258 xstrdup_for_dump (clone->name ()),
3259 clone->order,
3260 xstrdup_for_dump (n->name ()),
3261 n->order);
3262 ref->remove_reference ();
3264 clone = clone->callers->caller;
3271 for (i = ipa_get_param_count (old_root_info);
3272 i < ipa_get_cs_argument_count (args);
3273 i++)
3275 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3277 if (jf->type == IPA_JF_CONST)
3279 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3280 if (rdesc)
3281 rdesc->refcount = IPA_UNDESCRIBED_USE;
3283 else if (jf->type == IPA_JF_PASS_THROUGH)
3284 ipa_set_controlled_uses (new_root_info,
3285 jf->value.pass_through.formal_id,
3286 IPA_UNDESCRIBED_USE);
3290 /* Update jump functions and call note functions on inlining the call site CS.
3291 CS is expected to lead to a node already cloned by
3292 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3293 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3294 created. */
3296 bool
3297 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3298 vec<cgraph_edge *> *new_edges)
3300 bool changed;
3301 /* Do nothing if the preparation phase has not been carried out yet
3302 (i.e. during early inlining). */
3303 if (!ipa_node_params_vector.exists ())
3304 return false;
3305 gcc_assert (ipa_edge_args_vector);
3307 propagate_controlled_uses (cs);
3308 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3310 return changed;
3313 /* Frees all dynamically allocated structures that the argument info points
3314 to. */
3316 void
3317 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3319 vec_free (args->jump_functions);
3320 memset (args, 0, sizeof (*args));
3323 /* Free all ipa_edge structures. */
3325 void
3326 ipa_free_all_edge_args (void)
3328 int i;
3329 struct ipa_edge_args *args;
3331 if (!ipa_edge_args_vector)
3332 return;
3334 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3335 ipa_free_edge_args_substructures (args);
3337 vec_free (ipa_edge_args_vector);
3340 /* Frees all dynamically allocated structures that the param info points
3341 to. */
3343 void
3344 ipa_free_node_params_substructures (struct ipa_node_params *info)
3346 info->descriptors.release ();
3347 free (info->lattices);
3348 /* Lattice values and their sources are deallocated with their allocation
3349 pool. */
3350 info->known_csts.release ();
3351 info->known_contexts.release ();
3352 memset (info, 0, sizeof (*info));
3355 /* Free all ipa_node_params structures. */
3357 void
3358 ipa_free_all_node_params (void)
3360 int i;
3361 struct ipa_node_params *info;
3363 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3364 ipa_free_node_params_substructures (info);
3366 ipa_node_params_vector.release ();
3369 /* Grow ipcp_transformations if necessary. */
3371 void
3372 ipcp_grow_transformations_if_necessary (void)
3374 if (vec_safe_length (ipcp_transformations)
3375 <= (unsigned) symtab->cgraph_max_uid)
3376 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3379 /* Set the aggregate replacements of NODE to be AGGVALS. */
3381 void
3382 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3383 struct ipa_agg_replacement_value *aggvals)
3385 ipcp_grow_transformations_if_necessary ();
3386 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3389 /* Hook that is called by cgraph.c when an edge is removed. */
3391 static void
3392 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3394 struct ipa_edge_args *args;
3396 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3397 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3398 return;
3400 args = IPA_EDGE_REF (cs);
3401 if (args->jump_functions)
3403 struct ipa_jump_func *jf;
3404 int i;
3405 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3407 struct ipa_cst_ref_desc *rdesc;
3408 try_decrement_rdesc_refcount (jf);
3409 if (jf->type == IPA_JF_CONST
3410 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3411 && rdesc->cs == cs)
3412 rdesc->cs = NULL;
3416 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3419 /* Hook that is called by cgraph.c when a node is removed. */
3421 static void
3422 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3424 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3425 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3426 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3427 if (vec_safe_length (ipcp_transformations) > (unsigned)node->uid)
3429 (*ipcp_transformations)[(unsigned)node->uid].agg_values = NULL;
3430 (*ipcp_transformations)[(unsigned)node->uid].alignments = NULL;
3434 /* Hook that is called by cgraph.c when an edge is duplicated. */
3436 static void
3437 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3438 __attribute__((unused)) void *data)
3440 struct ipa_edge_args *old_args, *new_args;
3441 unsigned int i;
3443 ipa_check_create_edge_args ();
3445 old_args = IPA_EDGE_REF (src);
3446 new_args = IPA_EDGE_REF (dst);
3448 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3449 if (old_args->polymorphic_call_contexts)
3450 new_args->polymorphic_call_contexts
3451 = vec_safe_copy (old_args->polymorphic_call_contexts);
3453 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3455 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3456 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3458 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3460 if (src_jf->type == IPA_JF_CONST)
3462 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3464 if (!src_rdesc)
3465 dst_jf->value.constant.rdesc = NULL;
3466 else if (src->caller == dst->caller)
3468 struct ipa_ref *ref;
3469 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3470 gcc_checking_assert (n);
3471 ref = src->caller->find_reference (n, src->call_stmt,
3472 src->lto_stmt_uid);
3473 gcc_checking_assert (ref);
3474 dst->caller->clone_reference (ref, ref->stmt);
3476 gcc_checking_assert (ipa_refdesc_pool);
3477 struct ipa_cst_ref_desc *dst_rdesc
3478 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3479 dst_rdesc->cs = dst;
3480 dst_rdesc->refcount = src_rdesc->refcount;
3481 dst_rdesc->next_duplicate = NULL;
3482 dst_jf->value.constant.rdesc = dst_rdesc;
3484 else if (src_rdesc->cs == src)
3486 struct ipa_cst_ref_desc *dst_rdesc;
3487 gcc_checking_assert (ipa_refdesc_pool);
3488 dst_rdesc
3489 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3490 dst_rdesc->cs = dst;
3491 dst_rdesc->refcount = src_rdesc->refcount;
3492 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3493 src_rdesc->next_duplicate = dst_rdesc;
3494 dst_jf->value.constant.rdesc = dst_rdesc;
3496 else
3498 struct ipa_cst_ref_desc *dst_rdesc;
3499 /* This can happen during inlining, when a JFUNC can refer to a
3500 reference taken in a function up in the tree of inline clones.
3501 We need to find the duplicate that refers to our tree of
3502 inline clones. */
3504 gcc_assert (dst->caller->global.inlined_to);
3505 for (dst_rdesc = src_rdesc->next_duplicate;
3506 dst_rdesc;
3507 dst_rdesc = dst_rdesc->next_duplicate)
3509 struct cgraph_node *top;
3510 top = dst_rdesc->cs->caller->global.inlined_to
3511 ? dst_rdesc->cs->caller->global.inlined_to
3512 : dst_rdesc->cs->caller;
3513 if (dst->caller->global.inlined_to == top)
3514 break;
3516 gcc_assert (dst_rdesc);
3517 dst_jf->value.constant.rdesc = dst_rdesc;
3520 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3521 && src->caller == dst->caller)
3523 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3524 ? dst->caller->global.inlined_to : dst->caller;
3525 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3526 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3528 int c = ipa_get_controlled_uses (root_info, idx);
3529 if (c != IPA_UNDESCRIBED_USE)
3531 c++;
3532 ipa_set_controlled_uses (root_info, idx, c);
3538 /* Hook that is called by cgraph.c when a node is duplicated. */
3540 static void
3541 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3542 ATTRIBUTE_UNUSED void *data)
3544 struct ipa_node_params *old_info, *new_info;
3545 struct ipa_agg_replacement_value *old_av, *new_av;
3547 ipa_check_create_node_params ();
3548 old_info = IPA_NODE_REF (src);
3549 new_info = IPA_NODE_REF (dst);
3551 new_info->descriptors = old_info->descriptors.copy ();
3552 new_info->lattices = NULL;
3553 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3555 new_info->analysis_done = old_info->analysis_done;
3556 new_info->node_enqueued = old_info->node_enqueued;
3558 old_av = ipa_get_agg_replacements_for_node (src);
3559 if (old_av)
3561 new_av = NULL;
3562 while (old_av)
3564 struct ipa_agg_replacement_value *v;
3566 v = ggc_alloc<ipa_agg_replacement_value> ();
3567 memcpy (v, old_av, sizeof (*v));
3568 v->next = new_av;
3569 new_av = v;
3570 old_av = old_av->next;
3572 ipa_set_node_agg_value_chain (dst, new_av);
3575 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3577 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3579 ipcp_grow_transformations_if_necessary ();
3580 src_trans = ipcp_get_transformation_summary (src);
3581 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3582 vec<ipa_alignment, va_gc> *&dst_alignments
3583 = ipcp_get_transformation_summary (dst)->alignments;
3584 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3585 for (unsigned i = 0; i < src_alignments->length (); ++i)
3586 dst_alignments->quick_push ((*src_alignments)[i]);
3591 /* Analyze newly added function into callgraph. */
3593 static void
3594 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3596 if (node->has_gimple_body_p ())
3597 ipa_analyze_node (node);
3600 /* Register our cgraph hooks if they are not already there. */
3602 void
3603 ipa_register_cgraph_hooks (void)
3605 if (!edge_removal_hook_holder)
3606 edge_removal_hook_holder =
3607 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3608 if (!node_removal_hook_holder)
3609 node_removal_hook_holder =
3610 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3611 if (!edge_duplication_hook_holder)
3612 edge_duplication_hook_holder =
3613 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3614 if (!node_duplication_hook_holder)
3615 node_duplication_hook_holder =
3616 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3617 if (!function_insertion_hook_holder)
3618 function_insertion_hook_holder =
3619 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3622 /* Unregister our cgraph hooks. */
3624 static void
3625 ipa_unregister_cgraph_hooks (void)
3627 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3628 edge_removal_hook_holder = NULL;
3629 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3630 node_removal_hook_holder = NULL;
3631 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3632 edge_duplication_hook_holder = NULL;
3633 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3634 node_duplication_hook_holder = NULL;
3635 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3636 function_insertion_hook_holder = NULL;
3639 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3640 longer needed after ipa-cp. */
3642 void
3643 ipa_free_all_structures_after_ipa_cp (void)
3645 if (!optimize && !in_lto_p)
3647 ipa_free_all_edge_args ();
3648 ipa_free_all_node_params ();
3649 free_alloc_pool (ipcp_sources_pool);
3650 free_alloc_pool (ipcp_cst_values_pool);
3651 free_alloc_pool (ipcp_poly_ctx_values_pool);
3652 free_alloc_pool (ipcp_agg_lattice_pool);
3653 ipa_unregister_cgraph_hooks ();
3654 if (ipa_refdesc_pool)
3655 free_alloc_pool (ipa_refdesc_pool);
3659 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3660 longer needed after indirect inlining. */
3662 void
3663 ipa_free_all_structures_after_iinln (void)
3665 ipa_free_all_edge_args ();
3666 ipa_free_all_node_params ();
3667 ipa_unregister_cgraph_hooks ();
3668 if (ipcp_sources_pool)
3669 free_alloc_pool (ipcp_sources_pool);
3670 if (ipcp_cst_values_pool)
3671 free_alloc_pool (ipcp_cst_values_pool);
3672 if (ipcp_poly_ctx_values_pool)
3673 free_alloc_pool (ipcp_poly_ctx_values_pool);
3674 if (ipcp_agg_lattice_pool)
3675 free_alloc_pool (ipcp_agg_lattice_pool);
3676 if (ipa_refdesc_pool)
3677 free_alloc_pool (ipa_refdesc_pool);
3680 /* Print the ipa_tree_map data structure of NODE to F. */
3683 void
3684 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3686 int i, count;
3687 struct ipa_node_params *info;
3689 if (!node->definition)
3690 return;
3691 info = IPA_NODE_REF (node);
3692 fprintf (f, " function %s/%i parameter descriptors:\n",
3693 node->name (), node->order);
3694 count = ipa_get_param_count (info);
3695 for (i = 0; i < count; i++)
3697 int c;
3699 fprintf (f, " ");
3700 ipa_dump_param (f, info, i);
3701 if (ipa_is_param_used (info, i))
3702 fprintf (f, " used");
3703 c = ipa_get_controlled_uses (info, i);
3704 if (c == IPA_UNDESCRIBED_USE)
3705 fprintf (f, " undescribed_use");
3706 else
3707 fprintf (f, " controlled_uses=%i", c);
3708 fprintf (f, "\n");
3712 /* Print ipa_tree_map data structures of all functions in the
3713 callgraph to F. */
3715 void
3716 ipa_print_all_params (FILE * f)
3718 struct cgraph_node *node;
3720 fprintf (f, "\nFunction parameters:\n");
3721 FOR_EACH_FUNCTION (node)
3722 ipa_print_node_params (f, node);
3725 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3727 vec<tree>
3728 ipa_get_vector_of_formal_parms (tree fndecl)
3730 vec<tree> args;
3731 int count;
3732 tree parm;
3734 gcc_assert (!flag_wpa);
3735 count = count_formal_params (fndecl);
3736 args.create (count);
3737 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3738 args.quick_push (parm);
3740 return args;
3743 /* Return a heap allocated vector containing types of formal parameters of
3744 function type FNTYPE. */
3746 vec<tree>
3747 ipa_get_vector_of_formal_parm_types (tree fntype)
3749 vec<tree> types;
3750 int count = 0;
3751 tree t;
3753 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3754 count++;
3756 types.create (count);
3757 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3758 types.quick_push (TREE_VALUE (t));
3760 return types;
3763 /* Modify the function declaration FNDECL and its type according to the plan in
3764 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3765 to reflect the actual parameters being modified which are determined by the
3766 base_index field. */
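/* For illustration (a hedged sketch): to keep the first parameter of

     void foo (int a, struct big *b);

   and replace the second, ADJUSTMENTS would hold an IPA_PARM_OP_COPY entry
   with base_index 0 for A plus an entry for the replacement; as the code
   below shows, any entry that is neither a copy nor an IPA_PARM_OP_REMOVE
   gets a freshly built PARM_DECL of the requested type (made a pointer
   first when by_ref is set).  */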
3768 void
3769 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3771 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3772 tree orig_type = TREE_TYPE (fndecl);
3773 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3775 /* The following test is an ugly hack; some functions simply don't have any
3776 arguments in their type. This is probably a bug, but well... */
3777 bool care_for_types = (old_arg_types != NULL_TREE);
3778 bool last_parm_void;
3779 vec<tree> otypes;
3780 if (care_for_types)
3782 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3783 == void_type_node);
3784 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3785 if (last_parm_void)
3786 gcc_assert (oparms.length () + 1 == otypes.length ());
3787 else
3788 gcc_assert (oparms.length () == otypes.length ());
3790 else
3792 last_parm_void = false;
3793 otypes.create (0);
3796 int len = adjustments.length ();
3797 tree *link = &DECL_ARGUMENTS (fndecl);
3798 tree new_arg_types = NULL;
3799 for (int i = 0; i < len; i++)
3801 struct ipa_parm_adjustment *adj;
3802 gcc_assert (link);
3804 adj = &adjustments[i];
3805 tree parm;
3806 if (adj->op == IPA_PARM_OP_NEW)
3807 parm = NULL;
3808 else
3809 parm = oparms[adj->base_index];
3810 adj->base = parm;
3812 if (adj->op == IPA_PARM_OP_COPY)
3814 if (care_for_types)
3815 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3816 new_arg_types);
3817 *link = parm;
3818 link = &DECL_CHAIN (parm);
3820 else if (adj->op != IPA_PARM_OP_REMOVE)
3822 tree new_parm;
3823 tree ptype;
3825 if (adj->by_ref)
3826 ptype = build_pointer_type (adj->type);
3827 else
3829 ptype = adj->type;
3830 if (is_gimple_reg_type (ptype))
3832 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3833 if (TYPE_ALIGN (ptype) < malign)
3834 ptype = build_aligned_type (ptype, malign);
3838 if (care_for_types)
3839 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3841 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3842 ptype);
3843 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3844 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3845 DECL_ARTIFICIAL (new_parm) = 1;
3846 DECL_ARG_TYPE (new_parm) = ptype;
3847 DECL_CONTEXT (new_parm) = fndecl;
3848 TREE_USED (new_parm) = 1;
3849 DECL_IGNORED_P (new_parm) = 1;
3850 layout_decl (new_parm, 0);
3852 if (adj->op == IPA_PARM_OP_NEW)
3853 adj->base = NULL;
3854 else
3855 adj->base = parm;
3856 adj->new_decl = new_parm;
3858 *link = new_parm;
3859 link = &DECL_CHAIN (new_parm);
3863 *link = NULL_TREE;
3865 tree new_reversed = NULL;
3866 if (care_for_types)
3868 new_reversed = nreverse (new_arg_types);
3869 if (last_parm_void)
3871 if (new_reversed)
3872 TREE_CHAIN (new_arg_types) = void_list_node;
3873 else
3874 new_reversed = void_list_node;
3878 /* Use copy_node to preserve as much as possible from the original type
3879 (debug info, attribute lists etc.).
3880 The exception is METHOD_TYPEs, which must have a THIS argument;
3881 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3882 instead. */
3883 tree new_type = NULL;
3884 if (TREE_CODE (orig_type) != METHOD_TYPE
3885 || (adjustments[0].op == IPA_PARM_OP_COPY
3886 && adjustments[0].base_index == 0))
3888 new_type = build_distinct_type_copy (orig_type);
3889 TYPE_ARG_TYPES (new_type) = new_reversed;
3891 else
3893 new_type
3894 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3895 new_reversed));
3896 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3897 DECL_VINDEX (fndecl) = NULL_TREE;
3900 /* When signature changes, we need to clear builtin info. */
3901 if (DECL_BUILT_IN (fndecl))
3903 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3904 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3907 TREE_TYPE (fndecl) = new_type;
3908 DECL_VIRTUAL_P (fndecl) = 0;
3909 DECL_LANG_SPECIFIC (fndecl) = NULL;
3910 otypes.release ();
3911 oparms.release ();
3914 /* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
3915 If this is a directly recursive call, CS must be NULL. Otherwise it must
3916 contain the corresponding call graph edge. */
3918 void
3919 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3920 ipa_parm_adjustment_vec adjustments)
3922 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3923 vec<tree> vargs;
3924 vec<tree, va_gc> **debug_args = NULL;
3925 gcall *new_stmt;
3926 gimple_stmt_iterator gsi, prev_gsi;
3927 tree callee_decl;
3928 int i, len;
3930 len = adjustments.length ();
3931 vargs.create (len);
3932 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3933 current_node->remove_stmt_references (stmt);
3935 gsi = gsi_for_stmt (stmt);
3936 prev_gsi = gsi;
3937 gsi_prev (&prev_gsi);
3938 for (i = 0; i < len; i++)
3940 struct ipa_parm_adjustment *adj;
3942 adj = &adjustments[i];
3944 if (adj->op == IPA_PARM_OP_COPY)
3946 tree arg = gimple_call_arg (stmt, adj->base_index);
3948 vargs.quick_push (arg);
3950 else if (adj->op != IPA_PARM_OP_REMOVE)
3952 tree expr, base, off;
3953 location_t loc;
3954 unsigned int deref_align = 0;
3955 bool deref_base = false;
3957 /* Since we create a new parameter out of the value of the old one, we can
3958 do the following kinds of transformations:
3960 - A scalar passed by reference is converted to a scalar passed by
3961 value (adj->by_ref is false and the type of the original
3962 actual argument is a pointer to a scalar).
3964 - A part of an aggregate is passed instead of the whole aggregate.
3965 The part can be passed either by value or by reference; this is
3966 determined by the value of adj->by_ref. Moreover, the code below
3967 handles both the situation when the original aggregate is passed by
3968 value (its type is not a pointer) and when it is passed by
3969 reference (it is a pointer to an aggregate).
3971 When the new argument is passed by reference (adj->by_ref is true),
3972 it must be a part of an aggregate, and therefore we form it by
3973 simply taking the address of a reference inside the original
3974 aggregate. */
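/* A hypothetical illustration of the aggregate case (names made up):
   for an original call "foo (&s)" and an adjustment selecting the
   member at byte offset 4, passed by value, the code below builds
   the equivalent of

       tmp_1 = MEM[(int *) &s + 4];
       foo.part.0 (tmp_1);

   whereas with adj->by_ref set it passes the address "&s + 4"
   instead.  */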
3976 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3977 base = gimple_call_arg (stmt, adj->base_index);
3978 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3979 : EXPR_LOCATION (base);
3981 if (TREE_CODE (base) != ADDR_EXPR
3982 && POINTER_TYPE_P (TREE_TYPE (base)))
3983 off = build_int_cst (adj->alias_ptr_type,
3984 adj->offset / BITS_PER_UNIT);
3985 else
3987 HOST_WIDE_INT base_offset;
3988 tree prev_base;
3989 bool addrof;
3991 if (TREE_CODE (base) == ADDR_EXPR)
3993 base = TREE_OPERAND (base, 0);
3994 addrof = true;
3996 else
3997 addrof = false;
3998 prev_base = base;
3999 base = get_addr_base_and_unit_offset (base, &base_offset);
4000 /* Aggregate arguments can have non-invariant addresses. */
4001 if (!base)
4003 base = build_fold_addr_expr (prev_base);
4004 off = build_int_cst (adj->alias_ptr_type,
4005 adj->offset / BITS_PER_UNIT);
4007 else if (TREE_CODE (base) == MEM_REF)
4009 if (!addrof)
4011 deref_base = true;
4012 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4014 off = build_int_cst (adj->alias_ptr_type,
4015 base_offset
4016 + adj->offset / BITS_PER_UNIT);
4017 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4018 off);
4019 base = TREE_OPERAND (base, 0);
4021 else
4023 off = build_int_cst (adj->alias_ptr_type,
4024 base_offset
4025 + adj->offset / BITS_PER_UNIT);
4026 base = build_fold_addr_expr (base);
4030 if (!adj->by_ref)
4032 tree type = adj->type;
4033 unsigned int align;
4034 unsigned HOST_WIDE_INT misalign;
4036 if (deref_base)
4038 align = deref_align;
4039 misalign = 0;
4041 else
4043 get_pointer_alignment_1 (base, &align, &misalign);
4044 if (TYPE_ALIGN (type) > align)
4045 align = TYPE_ALIGN (type);
4047 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4048 * BITS_PER_UNIT);
4049 misalign = misalign & (align - 1);
4050 if (misalign != 0)
4051 align = (misalign & -misalign);
4052 if (align < TYPE_ALIGN (type))
4053 type = build_aligned_type (type, align);
4054 base = force_gimple_operand_gsi (&gsi, base,
4055 true, NULL, true, GSI_SAME_STMT);
4056 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4057 /* If expr is not a valid gimple call argument emit
4058 a load into a temporary. */
4059 if (is_gimple_reg_type (TREE_TYPE (expr)))
4061 gimple tem = gimple_build_assign (NULL_TREE, expr);
4062 if (gimple_in_ssa_p (cfun))
4064 gimple_set_vuse (tem, gimple_vuse (stmt));
4065 expr = make_ssa_name (TREE_TYPE (expr), tem);
4067 else
4068 expr = create_tmp_reg (TREE_TYPE (expr));
4069 gimple_assign_set_lhs (tem, expr);
4070 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4073 else
4075 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4076 expr = build_fold_addr_expr (expr);
4077 expr = force_gimple_operand_gsi (&gsi, expr,
4078 true, NULL, true, GSI_SAME_STMT);
4080 vargs.quick_push (expr);
4082 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4084 unsigned int ix;
4085 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4086 gimple def_temp;
4088 arg = gimple_call_arg (stmt, adj->base_index);
4089 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4091 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4092 continue;
4093 arg = fold_convert_loc (gimple_location (stmt),
4094 TREE_TYPE (origin), arg);
4096 if (debug_args == NULL)
4097 debug_args = decl_debug_args_insert (callee_decl);
4098 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4099 if (ddecl == origin)
4101 ddecl = (**debug_args)[ix + 1];
4102 break;
4104 if (ddecl == NULL)
4106 ddecl = make_node (DEBUG_EXPR_DECL);
4107 DECL_ARTIFICIAL (ddecl) = 1;
4108 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4109 DECL_MODE (ddecl) = DECL_MODE (origin);
4111 vec_safe_push (*debug_args, origin);
4112 vec_safe_push (*debug_args, ddecl);
4114 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4115 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4119 if (dump_file && (dump_flags & TDF_DETAILS))
4121 fprintf (dump_file, "replacing stmt:");
4122 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4125 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4126 vargs.release ();
4127 if (gimple_call_lhs (stmt))
4128 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4130 gimple_set_block (new_stmt, gimple_block (stmt));
4131 if (gimple_has_location (stmt))
4132 gimple_set_location (new_stmt, gimple_location (stmt));
4133 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4134 gimple_call_copy_flags (new_stmt, stmt);
4135 if (gimple_in_ssa_p (cfun))
4137 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4138 if (gimple_vdef (stmt))
4140 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4141 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4145 if (dump_file && (dump_flags & TDF_DETAILS))
4147 fprintf (dump_file, "with stmt:");
4148 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4149 fprintf (dump_file, "\n");
4151 gsi_replace (&gsi, new_stmt, true);
4152 if (cs)
4153 cs->set_call_stmt (new_stmt);
4156 current_node->record_stmt_references (gsi_stmt (gsi));
4157 gsi_prev (&gsi);
4159 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4162 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4163 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4164 specifies whether the function should care about type incompatibility between
4165 the current and new expressions. If it is false, the function will leave
4166 incompatibility issues to the caller. Return true iff the expression
4167 was modified. */
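/* A hypothetical example (names are illustrative): if parameter "p"
   was replaced by a new scalar "ISRA.1" representing what "*p" used
   to contain, an occurrence of "*p" is rewritten to "ISRA.1", and if
   the new parameter is itself passed by reference, to "*ISRA.1";
   when CONVERT is set and the types differ, the replacement is
   additionally wrapped in a VIEW_CONVERT_EXPR.  */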
4169 bool
4170 ipa_modify_expr (tree *expr, bool convert,
4171 ipa_parm_adjustment_vec adjustments)
4173 struct ipa_parm_adjustment *cand
4174 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4175 if (!cand)
4176 return false;
4178 tree src;
4179 if (cand->by_ref)
4180 src = build_simple_mem_ref (cand->new_decl);
4181 else
4182 src = cand->new_decl;
4184 if (dump_file && (dump_flags & TDF_DETAILS))
4186 fprintf (dump_file, "About to replace expr ");
4187 print_generic_expr (dump_file, *expr, 0);
4188 fprintf (dump_file, " with ");
4189 print_generic_expr (dump_file, src, 0);
4190 fprintf (dump_file, "\n");
4193 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4195 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4196 *expr = vce;
4198 else
4199 *expr = src;
4200 return true;
4203 /* If T is an SSA_NAME, return NULL if it is not a default def or
4204 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4205 the base variable is always returned, regardless of whether it is a
4206 default def. Return T if it is not an SSA_NAME. */
4208 static tree
4209 get_ssa_base_param (tree t, bool ignore_default_def)
4211 if (TREE_CODE (t) == SSA_NAME)
4213 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4214 return SSA_NAME_VAR (t);
4215 else
4216 return NULL_TREE;
4218 return t;
4221 /* Given an expression, return an adjustment entry specifying the
4222 transformation to be done on EXPR. If no suitable adjustment entry
4223 is found, return NULL.
4225 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4226 default def, otherwise bail out on them.
4228 If CONVERT is non-NULL, this function will set *CONVERT if the
4229 expression provided is a component reference. ADJUSTMENTS is the
4230 adjustments vector. */
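/* E.g. (offsets made up), for a use of "p->x" where "p" is a
   PARM_DECL and "x" lives at bit offset 32 within the pointed-to
   record, any BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR is first
   stripped (setting *CONVERT), and then ADJUSTMENTS is searched for
   an entry with base "p" and offset 32 that is neither a plain copy
   nor a removal.  */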
4232 ipa_parm_adjustment *
4233 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4234 ipa_parm_adjustment_vec adjustments,
4235 bool ignore_default_def)
4237 if (TREE_CODE (**expr) == BIT_FIELD_REF
4238 || TREE_CODE (**expr) == IMAGPART_EXPR
4239 || TREE_CODE (**expr) == REALPART_EXPR)
4241 *expr = &TREE_OPERAND (**expr, 0);
4242 if (convert)
4243 *convert = true;
4246 HOST_WIDE_INT offset, size, max_size;
4247 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4248 if (!base || size == -1 || max_size == -1)
4249 return NULL;
4251 if (TREE_CODE (base) == MEM_REF)
4253 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4254 base = TREE_OPERAND (base, 0);
4257 base = get_ssa_base_param (base, ignore_default_def);
4258 if (!base || TREE_CODE (base) != PARM_DECL)
4259 return NULL;
4261 struct ipa_parm_adjustment *cand = NULL;
4262 unsigned int len = adjustments.length ();
4263 for (unsigned i = 0; i < len; i++)
4265 struct ipa_parm_adjustment *adj = &adjustments[i];
4267 if (adj->base == base
4268 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4270 cand = adj;
4271 break;
4275 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4276 return NULL;
4277 return cand;
4280 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4282 static bool
4283 index_in_adjustments_multiple_times_p (int base_index,
4284 ipa_parm_adjustment_vec adjustments)
4286 int i, len = adjustments.length ();
4287 bool one = false;
4289 for (i = 0; i < len; i++)
4291 struct ipa_parm_adjustment *adj;
4292 adj = &adjustments[i];
4294 if (adj->base_index == base_index)
4296 if (one)
4297 return true;
4298 else
4299 one = true;
4302 return false;
4306 /* Return adjustments that should have the same effect on function parameters
4307 and call arguments as if they were first changed according to adjustments in
4308 INNER and then by adjustments in OUTER. */
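/* A hypothetical example: if INNER turned (a, b, c) into (a, c) by
   removing b, and OUTER then removes the second remaining parameter,
   the combined vector removes both b and c from the original
   signature.  Offsets of successive reductions simply add up:
   selecting offset 8 from a parameter that was itself a reduction at
   offset 16 yields a reduction at offset 24 of the original
   parameter.  */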
4310 ipa_parm_adjustment_vec
4311 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4312 ipa_parm_adjustment_vec outer)
4314 int i, outlen = outer.length ();
4315 int inlen = inner.length ();
4316 int removals = 0;
4317 ipa_parm_adjustment_vec adjustments, tmp;
4319 tmp.create (inlen);
4320 for (i = 0; i < inlen; i++)
4322 struct ipa_parm_adjustment *n;
4323 n = &inner[i];
4325 if (n->op == IPA_PARM_OP_REMOVE)
4326 removals++;
4327 else
4329 /* FIXME: Handling of new arguments is not implemented yet. */
4330 gcc_assert (n->op != IPA_PARM_OP_NEW);
4331 tmp.quick_push (*n);
4335 adjustments.create (outlen + removals);
4336 for (i = 0; i < outlen; i++)
4338 struct ipa_parm_adjustment r;
4339 struct ipa_parm_adjustment *out = &outer[i];
4340 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4342 memset (&r, 0, sizeof (r));
4343 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4344 if (out->op == IPA_PARM_OP_REMOVE)
4346 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4348 r.op = IPA_PARM_OP_REMOVE;
4349 adjustments.quick_push (r);
4351 continue;
4353 else
4355 /* FIXME: Handling of new arguments is not implemented yet. */
4356 gcc_assert (out->op != IPA_PARM_OP_NEW);
4359 r.base_index = in->base_index;
4360 r.type = out->type;
4362 /* FIXME: Create nonlocal value too. */
4364 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4365 r.op = IPA_PARM_OP_COPY;
4366 else if (in->op == IPA_PARM_OP_COPY)
4367 r.offset = out->offset;
4368 else if (out->op == IPA_PARM_OP_COPY)
4369 r.offset = in->offset;
4370 else
4371 r.offset = in->offset + out->offset;
4372 adjustments.quick_push (r);
4375 for (i = 0; i < inlen; i++)
4377 struct ipa_parm_adjustment *n = &inner[i];
4379 if (n->op == IPA_PARM_OP_REMOVE)
4380 adjustments.quick_push (*n);
4383 tmp.release ();
4384 return adjustments;
4387 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human-friendly
4388 way, assuming they are meant to be applied to FNDECL. */
4390 void
4391 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4392 tree fndecl)
4394 int i, len = adjustments.length ();
4395 bool first = true;
4396 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4398 fprintf (file, "IPA param adjustments: ");
4399 for (i = 0; i < len; i++)
4401 struct ipa_parm_adjustment *adj;
4402 adj = &adjustments[i];
4404 if (!first)
4405 fprintf (file, " ");
4406 else
4407 first = false;
4409 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4410 print_generic_expr (file, parms[adj->base_index], 0);
4411 if (adj->base)
4413 fprintf (file, ", base: ");
4414 print_generic_expr (file, adj->base, 0);
4416 if (adj->new_decl)
4418 fprintf (file, ", new_decl: ");
4419 print_generic_expr (file, adj->new_decl, 0);
4421 if (adj->new_ssa_base)
4423 fprintf (file, ", new_ssa_base: ");
4424 print_generic_expr (file, adj->new_ssa_base, 0);
4427 if (adj->op == IPA_PARM_OP_COPY)
4428 fprintf (file, ", copy_param");
4429 else if (adj->op == IPA_PARM_OP_REMOVE)
4430 fprintf (file, ", remove_param");
4431 else
4432 fprintf (file, ", offset %li", (long) adj->offset);
4433 if (adj->by_ref)
4434 fprintf (file, ", by_ref");
4435 print_node_brief (file, ", type: ", adj->type, 0);
4436 fprintf (file, "\n");
4438 parms.release ();
4441 /* Dump the AV linked list. */
4443 void
4444 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4446 bool comma = false;
4447 fprintf (f, " Aggregate replacements:");
4448 for (; av; av = av->next)
4450 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4451 av->index, av->offset);
4452 print_generic_expr (f, av->value, 0);
4453 comma = true;
4455 fprintf (f, "\n");
4458 /* Stream out jump function JUMP_FUNC to OB. */
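/* The layout written below (and read back by ipa_read_jump_function)
   is: the jump function type as a uhwi, a type-specific payload (the
   constant tree for IPA_JF_CONST; operation, formal_id and a
   bit-packed agg_preserved flag for pass-through and ancestor
   functions), then the number of aggregate items, a bit-packed
   by_ref flag if there are any, the items themselves, and finally
   the bit-packed alignment information.  */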
4460 static void
4461 ipa_write_jump_function (struct output_block *ob,
4462 struct ipa_jump_func *jump_func)
4464 struct ipa_agg_jf_item *item;
4465 struct bitpack_d bp;
4466 int i, count;
4468 streamer_write_uhwi (ob, jump_func->type);
4469 switch (jump_func->type)
4471 case IPA_JF_UNKNOWN:
4472 break;
4473 case IPA_JF_CONST:
4474 gcc_assert (
4475 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4476 stream_write_tree (ob, jump_func->value.constant.value, true);
4477 break;
4478 case IPA_JF_PASS_THROUGH:
4479 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4480 if (jump_func->value.pass_through.operation == NOP_EXPR)
4482 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4483 bp = bitpack_create (ob->main_stream);
4484 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4485 streamer_write_bitpack (&bp);
4487 else
4489 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4490 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4492 break;
4493 case IPA_JF_ANCESTOR:
4494 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4495 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4496 bp = bitpack_create (ob->main_stream);
4497 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4498 streamer_write_bitpack (&bp);
4499 break;
4502 count = vec_safe_length (jump_func->agg.items);
4503 streamer_write_uhwi (ob, count);
4504 if (count)
4506 bp = bitpack_create (ob->main_stream);
4507 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4508 streamer_write_bitpack (&bp);
4511 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4513 streamer_write_uhwi (ob, item->offset);
4514 stream_write_tree (ob, item->value, true);
4517 bp = bitpack_create (ob->main_stream);
4518 bp_pack_value (&bp, jump_func->alignment.known, 1);
4519 streamer_write_bitpack (&bp);
4520 if (jump_func->alignment.known)
4522 streamer_write_uhwi (ob, jump_func->alignment.align);
4523 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4527 /* Read in jump function JUMP_FUNC from IB. */
4529 static void
4530 ipa_read_jump_function (struct lto_input_block *ib,
4531 struct ipa_jump_func *jump_func,
4532 struct cgraph_edge *cs,
4533 struct data_in *data_in)
4535 enum jump_func_type jftype;
4536 enum tree_code operation;
4537 int i, count;
4539 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4540 switch (jftype)
4542 case IPA_JF_UNKNOWN:
4543 ipa_set_jf_unknown (jump_func);
4544 break;
4545 case IPA_JF_CONST:
4546 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4547 break;
4548 case IPA_JF_PASS_THROUGH:
4549 operation = (enum tree_code) streamer_read_uhwi (ib);
4550 if (operation == NOP_EXPR)
4552 int formal_id = streamer_read_uhwi (ib);
4553 struct bitpack_d bp = streamer_read_bitpack (ib);
4554 bool agg_preserved = bp_unpack_value (&bp, 1);
4555 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4557 else
4559 tree operand = stream_read_tree (ib, data_in);
4560 int formal_id = streamer_read_uhwi (ib);
4561 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4562 operation);
4564 break;
4565 case IPA_JF_ANCESTOR:
4567 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4568 int formal_id = streamer_read_uhwi (ib);
4569 struct bitpack_d bp = streamer_read_bitpack (ib);
4570 bool agg_preserved = bp_unpack_value (&bp, 1);
4571 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4572 break;
4576 count = streamer_read_uhwi (ib);
4577 vec_alloc (jump_func->agg.items, count);
4578 if (count)
4580 struct bitpack_d bp = streamer_read_bitpack (ib);
4581 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4583 for (i = 0; i < count; i++)
4585 struct ipa_agg_jf_item item;
4586 item.offset = streamer_read_uhwi (ib);
4587 item.value = stream_read_tree (ib, data_in);
4588 jump_func->agg.items->quick_push (item);
4591 struct bitpack_d bp = streamer_read_bitpack (ib);
4592 bool alignment_known = bp_unpack_value (&bp, 1);
4593 if (alignment_known)
4595 jump_func->alignment.known = true;
4596 jump_func->alignment.align = streamer_read_uhwi (ib);
4597 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4599 else
4600 jump_func->alignment.known = false;
4603 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4604 relevant to indirect inlining to OB. */
4606 static void
4607 ipa_write_indirect_edge_info (struct output_block *ob,
4608 struct cgraph_edge *cs)
4610 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4611 struct bitpack_d bp;
4613 streamer_write_hwi (ob, ii->param_index);
4614 bp = bitpack_create (ob->main_stream);
4615 bp_pack_value (&bp, ii->polymorphic, 1);
4616 bp_pack_value (&bp, ii->agg_contents, 1);
4617 bp_pack_value (&bp, ii->member_ptr, 1);
4618 bp_pack_value (&bp, ii->by_ref, 1);
4619 bp_pack_value (&bp, ii->vptr_changed, 1);
4620 streamer_write_bitpack (&bp);
4621 if (ii->agg_contents || ii->polymorphic)
4622 streamer_write_hwi (ob, ii->offset);
4623 else
4624 gcc_assert (ii->offset == 0);
4626 if (ii->polymorphic)
4628 streamer_write_hwi (ob, ii->otr_token);
4629 stream_write_tree (ob, ii->otr_type, true);
4630 ii->context.stream_out (ob);
4634 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4635 relevant to indirect inlining from IB. */
4637 static void
4638 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4639 struct data_in *data_in,
4640 struct cgraph_edge *cs)
4642 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4643 struct bitpack_d bp;
4645 ii->param_index = (int) streamer_read_hwi (ib);
4646 bp = streamer_read_bitpack (ib);
4647 ii->polymorphic = bp_unpack_value (&bp, 1);
4648 ii->agg_contents = bp_unpack_value (&bp, 1);
4649 ii->member_ptr = bp_unpack_value (&bp, 1);
4650 ii->by_ref = bp_unpack_value (&bp, 1);
4651 ii->vptr_changed = bp_unpack_value (&bp, 1);
4652 if (ii->agg_contents || ii->polymorphic)
4653 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4654 else
4655 ii->offset = 0;
4656 if (ii->polymorphic)
4658 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4659 ii->otr_type = stream_read_tree (ib, data_in);
4660 ii->context.stream_in (ib, data_in);
4664 /* Stream out NODE info to OB. */
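/* The record written below mirrors what ipa_read_node_info expects:
   the node reference, the parameter count, each parameter's move
   cost, a bitpack of the per-parameter "used" flags, the
   per-parameter controlled-use counts, and then, for every direct
   and indirect outgoing edge, twice its argument count -- the lowest
   bit flagging the presence of polymorphic call contexts -- followed
   by each argument's jump function and possibly its context;
   indirect edges are additionally followed by their indirect edge
   info.  */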
4666 static void
4667 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4669 int node_ref;
4670 lto_symtab_encoder_t encoder;
4671 struct ipa_node_params *info = IPA_NODE_REF (node);
4672 int j;
4673 struct cgraph_edge *e;
4674 struct bitpack_d bp;
4676 encoder = ob->decl_state->symtab_node_encoder;
4677 node_ref = lto_symtab_encoder_encode (encoder, node);
4678 streamer_write_uhwi (ob, node_ref);
4680 streamer_write_uhwi (ob, ipa_get_param_count (info));
4681 for (j = 0; j < ipa_get_param_count (info); j++)
4682 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4683 bp = bitpack_create (ob->main_stream);
4684 gcc_assert (info->analysis_done
4685 || ipa_get_param_count (info) == 0);
4686 gcc_assert (!info->node_enqueued);
4687 gcc_assert (!info->ipcp_orig_node);
4688 for (j = 0; j < ipa_get_param_count (info); j++)
4689 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4690 streamer_write_bitpack (&bp);
4691 for (j = 0; j < ipa_get_param_count (info); j++)
4692 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4693 for (e = node->callees; e; e = e->next_callee)
4695 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4697 streamer_write_uhwi (ob,
4698 ipa_get_cs_argument_count (args) * 2
4699 + (args->polymorphic_call_contexts != NULL));
4700 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4702 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4703 if (args->polymorphic_call_contexts != NULL)
4704 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4707 for (e = node->indirect_calls; e; e = e->next_callee)
4709 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4711 streamer_write_uhwi (ob,
4712 ipa_get_cs_argument_count (args) * 2
4713 + (args->polymorphic_call_contexts != NULL));
4714 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4716 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4717 if (args->polymorphic_call_contexts != NULL)
4718 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4720 ipa_write_indirect_edge_info (ob, e);
4724 /* Stream in NODE info from IB. */
4726 static void
4727 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4728 struct data_in *data_in)
4730 struct ipa_node_params *info = IPA_NODE_REF (node);
4731 int k;
4732 struct cgraph_edge *e;
4733 struct bitpack_d bp;
4735 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4737 for (k = 0; k < ipa_get_param_count (info); k++)
4738 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4740 bp = streamer_read_bitpack (ib);
4741 if (ipa_get_param_count (info) != 0)
4742 info->analysis_done = true;
4743 info->node_enqueued = false;
4744 for (k = 0; k < ipa_get_param_count (info); k++)
4745 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4746 for (k = 0; k < ipa_get_param_count (info); k++)
4747 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4748 for (e = node->callees; e; e = e->next_callee)
4750 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4751 int count = streamer_read_uhwi (ib);
4752 bool contexts_computed = count & 1;
4753 count /= 2;
4755 if (!count)
4756 continue;
4757 vec_safe_grow_cleared (args->jump_functions, count);
4758 if (contexts_computed)
4759 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4761 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4763 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4764 data_in);
4765 if (contexts_computed)
4766 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4769 for (e = node->indirect_calls; e; e = e->next_callee)
4771 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4772 int count = streamer_read_uhwi (ib);
4773 bool contexts_computed = count & 1;
4774 count /= 2;
4776 if (count)
4778 vec_safe_grow_cleared (args->jump_functions, count);
4779 if (contexts_computed)
4780 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4781 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4783 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4784 data_in);
4785 if (contexts_computed)
4786 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4789 ipa_read_indirect_edge_info (ib, data_in, e);
4793 /* Write jump functions for all analyzed functions in the current LTO partition. */
4795 void
4796 ipa_prop_write_jump_functions (void)
4798 struct cgraph_node *node;
4799 struct output_block *ob;
4800 unsigned int count = 0;
4801 lto_symtab_encoder_iterator lsei;
4802 lto_symtab_encoder_t encoder;
4805 if (!ipa_node_params_vector.exists ())
4806 return;
4808 ob = create_output_block (LTO_section_jump_functions);
4809 encoder = ob->decl_state->symtab_node_encoder;
4810 ob->symbol = NULL;
4811 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4812 lsei_next_function_in_partition (&lsei))
4814 node = lsei_cgraph_node (lsei);
4815 if (node->has_gimple_body_p ()
4816 && IPA_NODE_REF (node) != NULL)
4817 count++;
4820 streamer_write_uhwi (ob, count);
4822 /* Process all of the functions. */
4823 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4824 lsei_next_function_in_partition (&lsei))
4826 node = lsei_cgraph_node (lsei);
4827 if (node->has_gimple_body_p ()
4828 && IPA_NODE_REF (node) != NULL)
4829 ipa_write_node_info (ob, node);
4831 streamer_write_char_stream (ob->main_stream, 0);
4832 produce_asm (ob, NULL);
4833 destroy_output_block (ob);
4836 /* Read section in file FILE_DATA of length LEN with data DATA. */
4838 static void
4839 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4840 size_t len)
4842 const struct lto_function_header *header =
4843 (const struct lto_function_header *) data;
4844 const int cfg_offset = sizeof (struct lto_function_header);
4845 const int main_offset = cfg_offset + header->cfg_size;
4846 const int string_offset = main_offset + header->main_size;
4847 struct data_in *data_in;
4848 unsigned int i;
4849 unsigned int count;
4851 lto_input_block ib_main ((const char *) data + main_offset,
4852 header->main_size);
4854 data_in =
4855 lto_data_in_create (file_data, (const char *) data + string_offset,
4856 header->string_size, vNULL);
4857 count = streamer_read_uhwi (&ib_main);
4859 for (i = 0; i < count; i++)
4861 unsigned int index;
4862 struct cgraph_node *node;
4863 lto_symtab_encoder_t encoder;
4865 index = streamer_read_uhwi (&ib_main);
4866 encoder = file_data->symtab_node_encoder;
4867 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4868 index));
4869 gcc_assert (node->definition);
4870 ipa_read_node_info (&ib_main, node, data_in);
4872 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4873 len);
4874 lto_data_in_delete (data_in);
4877 /* Read ipcp jump functions. */
4879 void
4880 ipa_prop_read_jump_functions (void)
4882 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4883 struct lto_file_decl_data *file_data;
4884 unsigned int j = 0;
4886 ipa_check_create_node_params ();
4887 ipa_check_create_edge_args ();
4888 ipa_register_cgraph_hooks ();
4890 while ((file_data = file_data_vec[j++]))
4892 size_t len;
4893 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4895 if (data)
4896 ipa_prop_read_section (file_data, data, len);
4900 /* After merging units, we can get a mismatch in argument counts.
4901 Also, decl merging might have rendered parameter lists obsolete.
4902 Re-create the basic IPA data structures so they exist again. */
4904 void
4905 ipa_update_after_lto_read (void)
4907 ipa_check_create_node_params ();
4908 ipa_check_create_edge_args ();
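/* Stream out the aggregate value replacement chain and the known
   parameter alignments of NODE to OB.  */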
4911 void
4912 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4914 int node_ref;
4915 unsigned int count = 0;
4916 lto_symtab_encoder_t encoder;
4917 struct ipa_agg_replacement_value *aggvals, *av;
4919 aggvals = ipa_get_agg_replacements_for_node (node);
4920 encoder = ob->decl_state->symtab_node_encoder;
4921 node_ref = lto_symtab_encoder_encode (encoder, node);
4922 streamer_write_uhwi (ob, node_ref);
4924 for (av = aggvals; av; av = av->next)
4925 count++;
4926 streamer_write_uhwi (ob, count);
4928 for (av = aggvals; av; av = av->next)
4930 struct bitpack_d bp;
4932 streamer_write_uhwi (ob, av->offset);
4933 streamer_write_uhwi (ob, av->index);
4934 stream_write_tree (ob, av->value, true);
4936 bp = bitpack_create (ob->main_stream);
4937 bp_pack_value (&bp, av->by_ref, 1);
4938 streamer_write_bitpack (&bp);
4941 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4942 if (ts && vec_safe_length (ts->alignments) > 0)
4944 count = ts->alignments->length ();
4946 streamer_write_uhwi (ob, count);
4947 for (unsigned i = 0; i < count; ++i)
4949 ipa_alignment *parm_al = &(*ts->alignments)[i];
4951 struct bitpack_d bp;
4952 bp = bitpack_create (ob->main_stream);
4953 bp_pack_value (&bp, parm_al->known, 1);
4954 streamer_write_bitpack (&bp);
4955 if (parm_al->known)
4957 streamer_write_uhwi (ob, parm_al->align);
4958 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4959 parm_al->misalign);
4963 else
4964 streamer_write_uhwi (ob, 0);
4967 /* Stream in the aggregate value replacement chain and the known parameter alignments for NODE from IB. */
4969 static void
4970 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4971 data_in *data_in)
4973 struct ipa_agg_replacement_value *aggvals = NULL;
4974 unsigned int count, i;
4976 count = streamer_read_uhwi (ib);
4977 for (i = 0; i < count; i++)
4979 struct ipa_agg_replacement_value *av;
4980 struct bitpack_d bp;
4982 av = ggc_alloc<ipa_agg_replacement_value> ();
4983 av->offset = streamer_read_uhwi (ib);
4984 av->index = streamer_read_uhwi (ib);
4985 av->value = stream_read_tree (ib, data_in);
4986 bp = streamer_read_bitpack (ib);
4987 av->by_ref = bp_unpack_value (&bp, 1);
4988 av->next = aggvals;
4989 aggvals = av;
4991 ipa_set_node_agg_value_chain (node, aggvals);
4993 count = streamer_read_uhwi (ib);
4994 if (count > 0)
4996 ipcp_grow_transformations_if_necessary ();
4998 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4999 vec_safe_grow_cleared (ts->alignments, count);
5001 for (i = 0; i < count; i++)
5003 ipa_alignment *parm_al;
5004 parm_al = &(*ts->alignments)[i];
5005 struct bitpack_d bp;
5006 bp = streamer_read_bitpack (ib);
5007 parm_al->known = bp_unpack_value (&bp, 1);
5008 if (parm_al->known)
5010 parm_al->align = streamer_read_uhwi (ib);
5011 parm_al->misalign
5012 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5013 0, parm_al->align);
5019 /* Write aggregate replacement values and parameter alignments for all nodes in the current partition. */
5021 void
5022 ipcp_write_transformation_summaries (void)
5024 struct cgraph_node *node;
5025 struct output_block *ob;
5026 unsigned int count = 0;
5027 lto_symtab_encoder_iterator lsei;
5028 lto_symtab_encoder_t encoder;
5030 ob = create_output_block (LTO_section_ipcp_transform);
5031 encoder = ob->decl_state->symtab_node_encoder;
5032 ob->symbol = NULL;
5033 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5034 lsei_next_function_in_partition (&lsei))
5036 node = lsei_cgraph_node (lsei);
5037 if (node->has_gimple_body_p ())
5038 count++;
5041 streamer_write_uhwi (ob, count);
5043 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5044 lsei_next_function_in_partition (&lsei))
5046 node = lsei_cgraph_node (lsei);
5047 if (node->has_gimple_body_p ())
5048 write_ipcp_transformation_info (ob, node);
5050 streamer_write_char_stream (ob->main_stream, 0);
5051 produce_asm (ob, NULL);
5052 destroy_output_block (ob);
5055 /* Read replacements section in file FILE_DATA of length LEN with data
5056 DATA. */
5058 static void
5059 read_replacements_section (struct lto_file_decl_data *file_data,
5060 const char *data,
5061 size_t len)
5063 const struct lto_function_header *header =
5064 (const struct lto_function_header *) data;
5065 const int cfg_offset = sizeof (struct lto_function_header);
5066 const int main_offset = cfg_offset + header->cfg_size;
5067 const int string_offset = main_offset + header->main_size;
5068 struct data_in *data_in;
5069 unsigned int i;
5070 unsigned int count;
5072 lto_input_block ib_main ((const char *) data + main_offset,
5073 header->main_size);
5075 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5076 header->string_size, vNULL);
5077 count = streamer_read_uhwi (&ib_main);
5079 for (i = 0; i < count; i++)
5081 unsigned int index;
5082 struct cgraph_node *node;
5083 lto_symtab_encoder_t encoder;
5085 index = streamer_read_uhwi (&ib_main);
5086 encoder = file_data->symtab_node_encoder;
5087 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5088 index));
5089 gcc_assert (node->definition);
5090 read_ipcp_transformation_info (&ib_main, node, data_in);
5092 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5093 len);
5094 lto_data_in_delete (data_in);
5097 /* Read IPA-CP aggregate replacements. */
5099 void
5100 ipcp_read_transformation_summaries (void)
5102 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5103 struct lto_file_decl_data *file_data;
5104 unsigned int j = 0;
5106 while ((file_data = file_data_vec[j++]))
5108 size_t len;
5109 const char *data = lto_get_section_data (file_data,
5110 LTO_section_ipcp_transform,
5111 NULL, &len);
5112 if (data)
5113 read_replacements_section (file_data, data, len);
5117 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5118 NODE. */
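/* For example, if parameter 1 of the original function is skipped in
   the clone, replacements referring to indices 0, 2 and 3 are
   renumbered to 0, 1 and 2 respectively, matching the clone's
   shorter parameter list.  */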
5120 static void
5121 adjust_agg_replacement_values (struct cgraph_node *node,
5122 struct ipa_agg_replacement_value *aggval)
5124 struct ipa_agg_replacement_value *v;
5125 int i, c = 0, d = 0, *adj;
5127 if (!node->clone.combined_args_to_skip)
5128 return;
5130 for (v = aggval; v; v = v->next)
5132 gcc_assert (v->index >= 0);
5133 if (c < v->index)
5134 c = v->index;
5136 c++;
5138 adj = XALLOCAVEC (int, c);
5139 for (i = 0; i < c; i++)
5140 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5142 adj[i] = -1;
5143 d++;
5145 else
5146 adj[i] = i - d;
5148 for (v = aggval; v; v = v->next)
5149 v->index = adj[v->index];
5152 /* Dominator walker driving the ipcp modification phase. */
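/* For instance (names are illustrative), if IPA-CP has determined
   that the memory pointed to by parameter "p" always holds 42 at
   offset 0, a load

       x_1 = *p;

   encountered during the walk is rewritten into "x_1 = 42;", with a
   NOP_EXPR or VIEW_CONVERT_EXPR folded in when the types of the load
   and of the known constant differ.  */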
5154 class ipcp_modif_dom_walker : public dom_walker
5156 public:
5157 ipcp_modif_dom_walker (struct func_body_info *fbi,
5158 vec<ipa_param_descriptor> descs,
5159 struct ipa_agg_replacement_value *av,
5160 bool *sc, bool *cc)
5161 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5162 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5164 virtual void before_dom_children (basic_block);
5166 private:
5167 struct func_body_info *m_fbi;
5168 vec<ipa_param_descriptor> m_descriptors;
5169 struct ipa_agg_replacement_value *m_aggval;
5170 bool *m_something_changed, *m_cfg_changed;
5173 void
5174 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5176 gimple_stmt_iterator gsi;
5177 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5179 struct ipa_agg_replacement_value *v;
5180 gimple stmt = gsi_stmt (gsi);
5181 tree rhs, val, t;
5182 HOST_WIDE_INT offset, size;
5183 int index;
5184 bool by_ref, vce;
5186 if (!gimple_assign_load_p (stmt))
5187 continue;
5188 rhs = gimple_assign_rhs1 (stmt);
5189 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5190 continue;
5192 vce = false;
5193 t = rhs;
5194 while (handled_component_p (t))
5196 /* V_C_E can do things like convert an array of integers into one
5197 bigger integer and similar things that we do not handle below. */
5198 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5200 vce = true;
5201 break;
5203 t = TREE_OPERAND (t, 0);
5205 if (vce)
5206 continue;
5208 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5209 &offset, &size, &by_ref))
5210 continue;
5211 for (v = m_aggval; v; v = v->next)
5212 if (v->index == index
5213 && v->offset == offset)
5214 break;
5215 if (!v
5216 || v->by_ref != by_ref
5217 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5218 continue;
5220 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5221 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5223 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5224 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5225 else if (TYPE_SIZE (TREE_TYPE (rhs))
5226 == TYPE_SIZE (TREE_TYPE (v->value)))
5227 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5228 else
5230 if (dump_file)
5232 fprintf (dump_file, " const ");
5233 print_generic_expr (dump_file, v->value, 0);
5234 fprintf (dump_file, " can't be converted to type of ");
5235 print_generic_expr (dump_file, rhs, 0);
5236 fprintf (dump_file, "\n");
5238 continue;
5241 else
5242 val = v->value;
5244 if (dump_file && (dump_flags & TDF_DETAILS))
5246 fprintf (dump_file, "Modifying stmt:\n ");
5247 print_gimple_stmt (dump_file, stmt, 0, 0);
5249 gimple_assign_set_rhs_from_tree (&gsi, val);
5250 update_stmt (stmt);
5252 if (dump_file && (dump_flags & TDF_DETAILS))
5254 fprintf (dump_file, "into:\n ");
5255 print_gimple_stmt (dump_file, stmt, 0, 0);
5256 fprintf (dump_file, "\n");
5259 *m_something_changed = true;
5260 if (maybe_clean_eh_stmt (stmt)
5261 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5262 *m_cfg_changed = true;
5267 /* Update alignment of formal parameters as described in
5268 ipcp_transformation_summary. */
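/* For example, if all contexts agree that a pointer parameter is
   16-byte aligned with a misalignment of 4, this records align=16,
   misalign=4 in the ptr_info of the parameter's default-def SSA
   name, unless an alignment at least as strong was already known.  */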
5270 static void
5271 ipcp_update_alignments (struct cgraph_node *node)
5273 tree fndecl = node->decl;
5274 tree parm = DECL_ARGUMENTS (fndecl);
5275 tree next_parm = parm;
5276 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5277 if (!ts || vec_safe_length (ts->alignments) == 0)
5278 return;
5279 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5280 unsigned count = alignments.length ();
5282 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5284 if (node->clone.combined_args_to_skip
5285 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5286 continue;
5287 gcc_checking_assert (parm);
5288 next_parm = DECL_CHAIN (parm);
5290 if (!alignments[i].known || !is_gimple_reg (parm))
5291 continue;
5292 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5293 if (!ddef)
5294 continue;
5296 if (dump_file)
5297 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5298 "misalignment to %u\n", i, alignments[i].align,
5299 alignments[i].misalign);
5301 struct ptr_info_def *pi = get_ptr_info (ddef);
5302 gcc_checking_assert (pi);
5303 unsigned old_align;
5304 unsigned old_misalign;
5305 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5307 if (old_known
5308 && old_align >= alignments[i].align)
5310 if (dump_file)
5311 fprintf (dump_file, " But the alignment was already %u.\n",
5312 old_align);
5313 continue;
5315 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5319 /* IPCP transformation phase doing propagation of aggregate values. */
5321 unsigned int
5322 ipcp_transform_function (struct cgraph_node *node)
5324 vec<ipa_param_descriptor> descriptors = vNULL;
5325 struct func_body_info fbi;
5326 struct ipa_agg_replacement_value *aggval;
5327 int param_count;
5328 bool cfg_changed = false, something_changed = false;
5330 gcc_checking_assert (cfun);
5331 gcc_checking_assert (current_function_decl);
5333 if (dump_file)
5334 fprintf (dump_file, "Modification phase of node %s/%i\n",
5335 node->name (), node->order);
5337 ipcp_update_alignments (node);
5338 aggval = ipa_get_agg_replacements_for_node (node);
5339 if (!aggval)
5340 return 0;
5341 param_count = count_formal_params (node->decl);
5342 if (param_count == 0)
5343 return 0;
5344 adjust_agg_replacement_values (node, aggval);
5345 if (dump_file)
5346 ipa_dump_agg_replacement_values (dump_file, aggval);
5348 fbi.node = node;
5349 fbi.info = NULL;
5350 fbi.bb_infos = vNULL;
5351 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5352 fbi.param_count = param_count;
5353 fbi.aa_walked = 0;
5355 descriptors.safe_grow_cleared (param_count);
5356 ipa_populate_param_decls (node, descriptors);
5357 calculate_dominance_info (CDI_DOMINATORS);
5358 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5359 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5361 int i;
5362 struct ipa_bb_info *bi;
5363 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5364 free_ipa_bb_info (bi);
5365 fbi.bb_infos.release ();
5366 free_dominance_info (CDI_DOMINATORS);
5367 (*ipcp_transformations)[node->uid].agg_values = NULL;
5368 (*ipcp_transformations)[node->uid].alignments = NULL;
5369 descriptors.release ();
5371 if (!something_changed)
5372 return 0;
5373 else if (cfg_changed)
5374 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5375 else
5376 return TODO_update_ssa_only_virtuals;