/* Interprocedural analyses.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "calls.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked by when analyzing this function.  */
  unsigned int aa_walked;
};
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;
/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return the index of the formal whose tree is PTREE in the function which
   corresponds to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}
/* Print to FILE a description of the Ith formal parameter of the function
   corresponding to INFO.  Note there is no setter function as the descriptor
   array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}
/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}
    }
}
/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup (node->name ()), node->order,
	       xstrdup (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}
/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}
/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc),
					      32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type that the constructor builds.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
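/* For illustration (an assumed example of ours, not taken from any particular
   testcase), a constructor following the pattern above behaves like:

     struct B : A
     {
       B () : A ()     // section 1: ancestor constructors run first
       {
	 // section 2: the compiler stores &_ZTV1B into this->vptr here
	 init ();      // section 3: user code, may call virtual functions
       }
     };

   so a backward walk from a virtual call in section 3 stops at the VMT
   stores of section 2 before it could ever reach the ancestor constructor
   calls of section 1.  */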
static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type of the instance
   may change between the beginning of the function and the point where CALL
   is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they can call destructors.  We assume that methods can not destroy the
   THIS pointer.  Also as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  jfunc->type = IPA_JF_UNKNOWN;
  return true;
}
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant
   type information or set it to unknown.  ARG is the object itself (not a
   pointer to it, unless dereferenced).  BASE is the base of the memory access
   as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but which does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   DESCRIPTORS describes the parameters of the current function and FBI holds
   gathered intraprocedural information (but can be NULL), STMT is the load
   statement.  If function returns true, *INDEX_P, *OFFSET_P and *BY_REF is
   filled with the parameter index, offset within the aggregate and whether it
   is a load from a value passed by reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}
/* Just like the previous function, just without the FBI pointer holding
   intraprocedural analysis information, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;

	a.0_2 = a;
	bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	int D.2064;

	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
	int a.0;
	int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);
     }

   INFO is the structure describing individual parameters as accessed at
   different stages of IPA optimization.  FBI contains the information that is
   only needed during intraprocedural analysis.  */
static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}
/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
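/* For reference, the layout this predicate looks for is the one g++ uses for
   pointers to member functions under the Itanium C++ ABI (the field names
   below are illustrative; only the field count and types are checked):

     struct
     {
       void (S::* __pfn) ();   // pointer field with a METHOD_TYPE target
       ptrdiff_t __delta;      // integral adjustment for the this pointer
     };  */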
static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
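/* For example (our illustration), if the backward walk in
   determine_locally_known_aggregate_parts below encounters, in this order
   from the call site:

     s.a = 2;   // placed in the list: offset 0, constant 2
     s.c = 3;   // placed in the list: offset 64, constant 3
     s.a = 1;   // *ALREADY_THERE: offset 0 is subsequently overwritten

   the list ends up with two constant entries sorted by offset, and the
   overwritten store of 1 is never recorded.  */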
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
/* Return the type of the Ith formal parameter of the function called by call
   graph edge E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we can not use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}
/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}
1888 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
1889 (described by INFO). PARMS_AINFO is a pointer to a vector containing
1890 intermediate information about each formal parameter. Currently it checks
1891 whether the call calls a pointer that is a formal parameter and if so, the
1892 parameter is marked with the called flag and an indirect call graph edge
1893 describing the call is created. This is very simple for ordinary pointers
1894 represented in SSA but not so nice when it comes to member pointers. The
1895 ugly part of this function does nothing more than try to match the
1896 pattern of such a call. An example of such a pattern is the gimple dump
1897 below, the call is on the last line:
1899 <bb 2>:
1900 f$__delta_5 = f.__delta;
1901 f$__pfn_24 = f.__pfn;
1903 or
1904 <bb 2>:
1905 f$__delta_5 = MEM[(struct *)&f];
1906 f$__pfn_24 = MEM[(struct *)&f + 4B];
1908 and a few lines below:
1910 <bb 5>
1911 D.2496_3 = (int) f$__pfn_24;
1912 D.2497_4 = D.2496_3 & 1;
1913 if (D.2497_4 != 0)
1914 goto <bb 3>;
1915 else
1916 goto <bb 4>;
1918 <bb 6>:
1919 D.2500_7 = (unsigned int) f$__delta_5;
1920 D.2501_8 = &S + D.2500_7;
1921 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1922 D.2503_10 = *D.2502_9;
1923 D.2504_12 = f$__pfn_24 + -1;
1924 D.2505_13 = (unsigned int) D.2504_12;
1925 D.2506_14 = D.2503_10 + D.2505_13;
1926 D.2507_15 = *D.2506_14;
1927 iftmp.11_16 = (String:: *) D.2507_15;
1929 <bb 7>:
1930 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1931 D.2500_19 = (unsigned int) f$__delta_5;
1932 D.2508_20 = &S + D.2500_19;
1933 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1935 Such patterns are the result of simple calls to a member pointer:
1937 int doprinting (int (MyString::* f)(int) const)
1939 MyString S ("somestring");
1941 return (S.*f)(4);
1944 Moreover, the function also looks for called pointers loaded from aggregates
1945 passed by value or reference. */
1947 static void
1948 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
1949 tree target)
1951 struct ipa_node_params *info = fbi->info;
1952 HOST_WIDE_INT offset;
1953 bool by_ref;
1955 if (SSA_NAME_IS_DEFAULT_DEF (target))
1957 tree var = SSA_NAME_VAR (target);
1958 int index = ipa_get_param_decl_index (info, var);
1959 if (index >= 0)
1960 ipa_note_param_call (fbi->node, index, call);
1961 return;
1964 int index;
1965 gimple def = SSA_NAME_DEF_STMT (target);
1966 if (gimple_assign_single_p (def)
1967 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
1968 gimple_assign_rhs1 (def), &index, &offset,
1969 NULL, &by_ref))
1971 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
1972 cs->indirect_info->offset = offset;
1973 cs->indirect_info->agg_contents = 1;
1974 cs->indirect_info->by_ref = by_ref;
1975 return;
1978 /* Now we need to try to match the complex pattern of calling a member
1979 pointer. */
1980 if (gimple_code (def) != GIMPLE_PHI
1981 || gimple_phi_num_args (def) != 2
1982 || !POINTER_TYPE_P (TREE_TYPE (target))
1983 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1984 return;
1986 /* First, we need to check whether one of these is a load from a member
1987 pointer that is a parameter to this function. */
1988 tree n1 = PHI_ARG_DEF (def, 0);
1989 tree n2 = PHI_ARG_DEF (def, 1);
1990 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
1991 return;
1992 gimple d1 = SSA_NAME_DEF_STMT (n1);
1993 gimple d2 = SSA_NAME_DEF_STMT (n2);
1995 tree rec;
1996 basic_block bb, virt_bb;
1997 basic_block join = gimple_bb (def);
1998 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2000 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2001 return;
2003 bb = EDGE_PRED (join, 0)->src;
2004 virt_bb = gimple_bb (d2);
2006 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2008 bb = EDGE_PRED (join, 1)->src;
2009 virt_bb = gimple_bb (d1);
2011 else
2012 return;
2014 /* Second, we need to check that the basic blocks are laid out in the way
2015 corresponding to the pattern. */
2017 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2018 || single_pred (virt_bb) != bb
2019 || single_succ (virt_bb) != join)
2020 return;
2022 /* Third, let's check that the branch is taken depending on the least
2023 significant bit of the pfn. */
2025 gimple branch = last_stmt (bb);
2026 if (!branch || gimple_code (branch) != GIMPLE_COND)
2027 return;
2029 if ((gimple_cond_code (branch) != NE_EXPR
2030 && gimple_cond_code (branch) != EQ_EXPR)
2031 || !integer_zerop (gimple_cond_rhs (branch)))
2032 return;
2034 tree cond = gimple_cond_lhs (branch);
2035 if (!ipa_is_ssa_with_stmt_def (cond))
2036 return;
2038 def = SSA_NAME_DEF_STMT (cond);
2039 if (!is_gimple_assign (def)
2040 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2041 || !integer_onep (gimple_assign_rhs2 (def)))
2042 return;
2044 cond = gimple_assign_rhs1 (def);
2045 if (!ipa_is_ssa_with_stmt_def (cond))
2046 return;
2048 def = SSA_NAME_DEF_STMT (cond);
2050 if (is_gimple_assign (def)
2051 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2053 cond = gimple_assign_rhs1 (def);
2054 if (!ipa_is_ssa_with_stmt_def (cond))
2055 return;
2056 def = SSA_NAME_DEF_STMT (cond);
2059 tree rec2;
2060 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2061 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2062 == ptrmemfunc_vbit_in_delta),
2063 NULL);
2064 if (rec != rec2)
2065 return;
2067 index = ipa_get_param_decl_index (info, rec);
2068 if (index >= 0
2069 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2071 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2072 cs->indirect_info->offset = offset;
2073 cs->indirect_info->agg_contents = 1;
2074 cs->indirect_info->member_ptr = 1;
2077 return;
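
/* An example of the simpler, aggregate-contents case handled above:

     struct S { int (*cb) (int); };

     int foo (struct S *s)
     {
       return s->cb (4);
     }

   Here the called pointer is loaded from memory pointed to by the formal
   parameter s, so the indirect edge is noted with agg_contents set, with
   the offset of the cb field and with by_ref true.  */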
2080 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2081 object referenced in the expression is a formal parameter of the caller
2082 FBI->node (described by FBI->info), create a call note for the
2083 statement. */
2085 static void
2086 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2087 gcall *call, tree target)
2089 tree obj = OBJ_TYPE_REF_OBJECT (target);
2090 int index;
2091 HOST_WIDE_INT anc_offset;
2093 if (!flag_devirtualize)
2094 return;
2096 if (TREE_CODE (obj) != SSA_NAME)
2097 return;
2099 struct ipa_node_params *info = fbi->info;
2100 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2102 struct ipa_jump_func jfunc;
2103 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2104 return;
2106 anc_offset = 0;
2107 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2108 gcc_assert (index >= 0);
2109 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2110 call, &jfunc))
2111 return;
2113 else
2115 struct ipa_jump_func jfunc;
2116 gimple stmt = SSA_NAME_DEF_STMT (obj);
2117 tree expr;
2119 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2120 if (!expr)
2121 return;
2122 index = ipa_get_param_decl_index (info,
2123 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2124 gcc_assert (index >= 0);
2125 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2126 call, &jfunc, anc_offset))
2127 return;
2130 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2131 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2132 ii->offset = anc_offset;
2133 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2134 ii->otr_type = obj_type_ref_class (target);
2135 ii->polymorphic = 1;
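
/* An illustrative example of the virtual case:

     struct A { virtual int f (); };

     int foo (A *a)
     {
       return a->f ();
     }

   The OBJ_TYPE_REF object is the default definition of parameter a, so
   the call is noted as a polymorphic call on parameter zero with a zero
   ancestor offset, and the OTR token and type are taken from the call
   expression.  */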
2138 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2139 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2140 containing intermediate information about each formal parameter. */
2142 static void
2143 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2145 tree target = gimple_call_fn (call);
2147 if (!target
2148 || (TREE_CODE (target) != SSA_NAME
2149 && !virtual_method_call_p (target)))
2150 return;
2152 struct cgraph_edge *cs = fbi->node->get_edge (call);
2153 /* If we previously turned the call into a direct call, there is
2154 no need to analyze it. */
2155 if (cs && !cs->indirect_unknown_callee)
2156 return;
2158 if (cs->indirect_info->polymorphic)
2160 tree instance;
2161 tree target = gimple_call_fn (call);
2162 ipa_polymorphic_call_context context (current_function_decl,
2163 target, call, &instance);
2165 gcc_checking_assert (cs->indirect_info->otr_type
2166 == obj_type_ref_class (target));
2167 gcc_checking_assert (cs->indirect_info->otr_token
2168 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2170 cs->indirect_info->vptr_changed
2171 = !context.get_dynamic_type (instance,
2172 OBJ_TYPE_REF_OBJECT (target),
2173 obj_type_ref_class (target), call);
2174 cs->indirect_info->context = context;
2177 if (TREE_CODE (target) == SSA_NAME)
2178 ipa_analyze_indirect_call_uses (fbi, call, target);
2179 else if (virtual_method_call_p (target))
2180 ipa_analyze_virtual_call_uses (fbi, call, target);
2184 /* Analyze the call statement STMT with respect to formal parameters (described
2185 in INFO) of the caller given by FBI->NODE. Currently it only checks whether
2186 formal parameters are called. */
2188 static void
2189 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2191 if (is_gimple_call (stmt))
2192 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2195 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2196 If OP is a parameter declaration, mark it as used in the info structure
2197 passed in DATA. */
2199 static bool
2200 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2202 struct ipa_node_params *info = (struct ipa_node_params *) data;
2204 op = get_base_address (op);
2205 if (op
2206 && TREE_CODE (op) == PARM_DECL)
2208 int index = ipa_get_param_decl_index (info, op);
2209 gcc_assert (index >= 0);
2210 ipa_set_param_used (info, index, true);
2213 return false;
2216 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2217 the findings in various structures of the associated ipa_node_params
2218 structure, such as parameter flags, notes etc. FBI holds various data about
2219 the function being analyzed. */
2221 static void
2222 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2224 gimple_stmt_iterator gsi;
2225 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2227 gimple stmt = gsi_stmt (gsi);
2229 if (is_gimple_debug (stmt))
2230 continue;
2232 ipa_analyze_stmt_uses (fbi, stmt);
2233 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2234 visit_ref_for_mod_analysis,
2235 visit_ref_for_mod_analysis,
2236 visit_ref_for_mod_analysis);
2238 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2239 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2240 visit_ref_for_mod_analysis,
2241 visit_ref_for_mod_analysis,
2242 visit_ref_for_mod_analysis);
2245 /* Calculate controlled uses of parameters of NODE. */
2247 static void
2248 ipa_analyze_controlled_uses (struct cgraph_node *node)
2250 struct ipa_node_params *info = IPA_NODE_REF (node);
2252 for (int i = 0; i < ipa_get_param_count (info); i++)
2254 tree parm = ipa_get_param (info, i);
2255 int controlled_uses = 0;
2257 /* For SSA regs see if parameter is used. For non-SSA we compute
2258 the flag during modification analysis. */
2259 if (is_gimple_reg (parm))
2261 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2262 parm);
2263 if (ddef && !has_zero_uses (ddef))
2265 imm_use_iterator imm_iter;
2266 use_operand_p use_p;
2268 ipa_set_param_used (info, i, true);
2269 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2270 if (!is_gimple_call (USE_STMT (use_p)))
2272 if (!is_gimple_debug (USE_STMT (use_p)))
2274 controlled_uses = IPA_UNDESCRIBED_USE;
2275 break;
2278 else
2279 controlled_uses++;
2281 else
2282 controlled_uses = 0;
2284 else
2285 controlled_uses = IPA_UNDESCRIBED_USE;
2286 ipa_set_controlled_uses (info, i, controlled_uses);
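
/* A small worked example: in

     extern void use (void *);

     static void foo (int *p)
     {
       use (p);
       use (p);
     }

   every use of the default definition of p is an argument of a call, so
   p gets controlled_uses == 2.  Should p escape in any other way, for
   instance into a store to a global, the count would degrade to
   IPA_UNDESCRIBED_USE.  */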
2290 /* Free stuff in BI. */
2292 static void
2293 free_ipa_bb_info (struct ipa_bb_info *bi)
2295 bi->cg_edges.release ();
2296 bi->param_aa_statuses.release ();
2299 /* Dominator walker driving the analysis. */
2301 class analysis_dom_walker : public dom_walker
2303 public:
2304 analysis_dom_walker (struct func_body_info *fbi)
2305 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2307 virtual void before_dom_children (basic_block);
2309 private:
2310 struct func_body_info *m_fbi;
2313 void
2314 analysis_dom_walker::before_dom_children (basic_block bb)
2316 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2317 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2320 /* Initialize the array describing properties of formal parameters
2321 of NODE, analyze their uses and compute jump functions associated
2322 with actual arguments of calls from within NODE. */
2324 void
2325 ipa_analyze_node (struct cgraph_node *node)
2327 struct func_body_info fbi;
2328 struct ipa_node_params *info;
2330 ipa_check_create_node_params ();
2331 ipa_check_create_edge_args ();
2332 info = IPA_NODE_REF (node);
2334 if (info->analysis_done)
2335 return;
2336 info->analysis_done = 1;
2338 if (ipa_func_spec_opts_forbid_analysis_p (node))
2340 for (int i = 0; i < ipa_get_param_count (info); i++)
2342 ipa_set_param_used (info, i, true);
2343 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2345 return;
2348 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2349 push_cfun (func);
2350 calculate_dominance_info (CDI_DOMINATORS);
2351 ipa_initialize_node_params (node);
2352 ipa_analyze_controlled_uses (node);
2354 fbi.node = node;
2355 fbi.info = IPA_NODE_REF (node);
2356 fbi.bb_infos = vNULL;
2357 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2358 fbi.param_count = ipa_get_param_count (info);
2359 fbi.aa_walked = 0;
2361 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2363 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2364 bi->cg_edges.safe_push (cs);
2367 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2369 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2370 bi->cg_edges.safe_push (cs);
2373 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2375 int i;
2376 struct ipa_bb_info *bi;
2377 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2378 free_ipa_bb_info (bi);
2379 fbi.bb_infos.release ();
2380 free_dominance_info (CDI_DOMINATORS);
2381 pop_cfun ();
2384 /* Update the jump functions associated with call graph edge E when the call
2385 graph edge CS is being inlined, assuming that E->caller is already (possibly
2386 indirectly) inlined into CS->callee and that E has not been inlined. */
2388 static void
2389 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2390 struct cgraph_edge *e)
2392 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2393 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2394 int count = ipa_get_cs_argument_count (args);
2395 int i;
2397 for (i = 0; i < count; i++)
2399 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2400 struct ipa_polymorphic_call_context *dst_ctx
2401 = ipa_get_ith_polymorhic_call_context (args, i);
2403 if (dst->type == IPA_JF_ANCESTOR)
2405 struct ipa_jump_func *src;
2406 int dst_fid = dst->value.ancestor.formal_id;
2407 struct ipa_polymorphic_call_context *src_ctx
2408 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2410 /* A variable number of arguments can cause havoc if we try to access
2411 an argument that does not exist in the inlined edge. So make sure we
2412 don't. */
2413 if (dst_fid >= ipa_get_cs_argument_count (top))
2415 dst->type = IPA_JF_UNKNOWN;
2416 continue;
2419 src = ipa_get_ith_jump_func (top, dst_fid);
2421 if (src_ctx && !src_ctx->useless_p ())
2423 struct ipa_polymorphic_call_context ctx = *src_ctx;
2425 /* TODO: Make type preserved safe WRT contexts. */
2426 if (!ipa_get_jf_ancestor_type_preserved (dst))
2427 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2428 ctx.offset_by (dst->value.ancestor.offset);
2429 if (!ctx.useless_p ())
2431 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2432 count);
2433 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2435 dst_ctx->combine_with (ctx);
2438 if (src->agg.items
2439 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2441 struct ipa_agg_jf_item *item;
2442 int j;
2444 /* Currently we do not produce clobber aggregate jump functions,
2445 replace with merging when we do. */
2446 gcc_assert (!dst->agg.items);
2448 dst->agg.items = vec_safe_copy (src->agg.items);
2449 dst->agg.by_ref = src->agg.by_ref;
2450 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2451 item->offset -= dst->value.ancestor.offset;
2454 if (src->type == IPA_JF_PASS_THROUGH
2455 && src->value.pass_through.operation == NOP_EXPR)
2457 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2458 dst->value.ancestor.agg_preserved &=
2459 src->value.pass_through.agg_preserved;
2461 else if (src->type == IPA_JF_ANCESTOR)
2463 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2464 dst->value.ancestor.offset += src->value.ancestor.offset;
2465 dst->value.ancestor.agg_preserved &=
2466 src->value.ancestor.agg_preserved;
2468 else
2469 dst->type = IPA_JF_UNKNOWN;
2471 else if (dst->type == IPA_JF_PASS_THROUGH)
2473 struct ipa_jump_func *src;
2474 /* We must check the range because of calls with a variable number of
2475 arguments, and we cannot combine jump functions with operations. */
2476 if (dst->value.pass_through.operation == NOP_EXPR
2477 && (dst->value.pass_through.formal_id
2478 < ipa_get_cs_argument_count (top)))
2480 int dst_fid = dst->value.pass_through.formal_id;
2481 src = ipa_get_ith_jump_func (top, dst_fid);
2482 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2483 struct ipa_polymorphic_call_context *src_ctx
2484 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2486 if (src_ctx && !src_ctx->useless_p ())
2488 struct ipa_polymorphic_call_context ctx = *src_ctx;
2490 /* TODO: Make type preserved safe WRT contexts. */
2491 if (!ipa_get_jf_pass_through_type_preserved (dst))
2492 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2493 if (!ctx.useless_p ())
2495 if (!dst_ctx)
2497 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2498 count);
2499 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2501 dst_ctx->combine_with (ctx);
2504 switch (src->type)
2506 case IPA_JF_UNKNOWN:
2507 dst->type = IPA_JF_UNKNOWN;
2508 break;
2509 case IPA_JF_CONST:
2510 ipa_set_jf_cst_copy (dst, src);
2511 break;
2513 case IPA_JF_PASS_THROUGH:
2515 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2516 enum tree_code operation;
2517 operation = ipa_get_jf_pass_through_operation (src);
2519 if (operation == NOP_EXPR)
2521 bool agg_p;
2522 agg_p = dst_agg_p
2523 && ipa_get_jf_pass_through_agg_preserved (src);
2524 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2526 else
2528 tree operand = ipa_get_jf_pass_through_operand (src);
2529 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2530 operation);
2532 break;
2534 case IPA_JF_ANCESTOR:
2536 bool agg_p;
2537 agg_p = dst_agg_p
2538 && ipa_get_jf_ancestor_agg_preserved (src);
2539 ipa_set_ancestor_jf (dst,
2540 ipa_get_jf_ancestor_offset (src),
2541 ipa_get_jf_ancestor_formal_id (src),
2542 agg_p);
2543 break;
2545 default:
2546 gcc_unreachable ();
2549 if (src->agg.items
2550 && (dst_agg_p || !src->agg.by_ref))
2552 /* Currently we do not produce clobber aggregate jump
2553 functions, replace with merging when we do. */
2554 gcc_assert (!dst->agg.items);
2556 dst->agg.by_ref = src->agg.by_ref;
2557 dst->agg.items = vec_safe_copy (src->agg.items);
2560 else
2561 dst->type = IPA_JF_UNKNOWN;
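
/* A small worked example of the composition above: in

     extern int quux (int x);
     static int bar (int x) { return quux (x); }
     int foo (void) { return bar (7); }

   the edge bar->quux carries a simple pass-through jump function of
   formal 0 and the edge foo->bar carries a constant jump function with
   value 7.  When bar is inlined into foo, the constant is found in TOP
   through the pass-through's formal_id and the surviving edge foo->quux
   gets a constant jump function with value 7 directly.  */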
2566 /* If TARGET is an addr_expr of a function declaration, make it the
2567 (speculative) destination of an indirect edge IE and return the edge.
2568 Otherwise, return NULL. */
2570 struct cgraph_edge *
2571 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2572 bool speculative)
2574 struct cgraph_node *callee;
2575 struct inline_edge_summary *es = inline_edge_summary (ie);
2576 bool unreachable = false;
2578 if (TREE_CODE (target) == ADDR_EXPR)
2579 target = TREE_OPERAND (target, 0);
2580 if (TREE_CODE (target) != FUNCTION_DECL)
2582 target = canonicalize_constructor_val (target, NULL);
2583 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2585 if (ie->indirect_info->member_ptr)
2586 /* Member pointer call that goes through a VMT lookup. */
2587 return NULL;
2589 if (dump_enabled_p ())
2591 location_t loc = gimple_location_safe (ie->call_stmt);
2592 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2593 "discovered direct call to non-function in %s/%i, "
2594 "making it __builtin_unreachable\n",
2595 ie->caller->name (), ie->caller->order);
2598 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2599 callee = cgraph_node::get_create (target);
2600 unreachable = true;
2602 else
2603 callee = cgraph_node::get (target);
2605 else
2606 callee = cgraph_node::get (target);
2608 /* Because may-edges are not explicitly represented and the vtable may be
2609 external, we may create the first reference to the object in the unit. */
2610 if (!callee || callee->global.inlined_to)
2613 /* We had better make sure we can refer to it.
2614 In the case of static functions we are out of luck, since we have
2615 already removed their bodies. In the case of public functions we may or may
2616 not introduce the reference. */
2617 if (!canonicalize_constructor_val (target, NULL)
2618 || !TREE_PUBLIC (target))
2620 if (dump_file)
2621 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2622 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2623 xstrdup (ie->caller->name ()),
2624 ie->caller->order,
2625 xstrdup (ie->callee->name ()),
2626 ie->callee->order);
2627 return NULL;
2629 callee = cgraph_node::get_create (target);
2632 /* If the edge is already speculated. */
2633 if (speculative && ie->speculative)
2635 struct cgraph_edge *e2;
2636 struct ipa_ref *ref;
2637 ie->speculative_call_info (e2, ie, ref);
2638 if (e2->callee->ultimate_alias_target ()
2639 != callee->ultimate_alias_target ())
2641 if (dump_file)
2642 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2643 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2644 xstrdup (ie->caller->name ()),
2645 ie->caller->order,
2646 xstrdup (callee->name ()),
2647 callee->order,
2648 xstrdup (e2->callee->name ()),
2649 e2->callee->order);
2651 else
2653 if (dump_file)
2654 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2655 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2656 xstrdup (ie->caller->name ()),
2657 ie->caller->order,
2658 xstrdup (callee->name ()),
2659 callee->order);
2661 return NULL;
2664 if (!dbg_cnt (devirt))
2665 return NULL;
2667 ipa_check_create_node_params ();
2669 /* We cannot make edges to inline clones. It is a bug if someone removed
2670 the cgraph node too early. */
2671 gcc_assert (!callee->global.inlined_to);
2673 if (dump_file && !unreachable)
2675 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2676 "(%s/%i -> %s/%i), for stmt ",
2677 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2678 speculative ? "speculative" : "known",
2679 xstrdup (ie->caller->name ()),
2680 ie->caller->order,
2681 xstrdup (callee->name ()),
2682 callee->order);
2683 if (ie->call_stmt)
2684 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2685 else
2686 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2688 if (dump_enabled_p ())
2690 location_t loc = gimple_location_safe (ie->call_stmt);
2692 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2693 "converting indirect call in %s to direct call to %s\n",
2694 ie->caller->name (), callee->name ());
2696 if (!speculative)
2697 ie = ie->make_direct (callee);
2698 else
2700 if (!callee->can_be_discarded_p ())
2702 cgraph_node *alias;
2703 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2704 if (alias)
2705 callee = alias;
2707 ie = ie->make_speculative
2708 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2710 es = inline_edge_summary (ie);
2711 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2712 - eni_size_weights.call_cost);
2713 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2714 - eni_time_weights.call_cost);
2716 return ie;
2719 /* Retrieve a value from aggregate jump function AGG at the given OFFSET, or
2720 return NULL if there is none. BY_REF specifies whether the value has to
2721 be passed by reference or by value. */
2723 tree
2724 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2725 HOST_WIDE_INT offset, bool by_ref)
2727 struct ipa_agg_jf_item *item;
2728 int i;
2730 if (by_ref != agg->by_ref)
2731 return NULL;
2733 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2734 if (item->offset == offset)
2736 /* Currently we do not have clobber values, return NULL for them once
2737 we do. */
2738 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2739 return item->value;
2741 return NULL;
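
/* For illustration: if the aggregate part of a jump function records the
   pairs { 0 -> 1, 32 -> &f } with by_ref true, then

     ipa_find_agg_cst_for_param (&jfunc->agg, 32, true)

   returns the ADDR_EXPR of f, whereas asking for offset 64, or passing
   false for by_ref, returns NULL.  */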
2744 /* Remove a reference to SYMBOL from the list of references of a node given by
2745 reference description RDESC. Return true if the reference has been
2746 successfully found and removed. */
2748 static bool
2749 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2751 struct ipa_ref *to_del;
2752 struct cgraph_edge *origin;
2754 origin = rdesc->cs;
2755 if (!origin)
2756 return false;
2757 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2758 origin->lto_stmt_uid);
2759 if (!to_del)
2760 return false;
2762 to_del->remove_reference ();
2763 if (dump_file)
2764 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2765 xstrdup (origin->caller->name ()),
2766 origin->caller->order, xstrdup (symbol->name ()));
2767 return true;
2770 /* If JFUNC has a reference description with refcount different from
2771 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2772 NULL. JFUNC must be a constant jump function. */
2774 static struct ipa_cst_ref_desc *
2775 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2777 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2778 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2779 return rdesc;
2780 else
2781 return NULL;
2784 /* If the value of constant jump function JFUNC is an address of a function
2785 declaration, return the associated call graph node. Otherwise return
2786 NULL. */
2788 static cgraph_node *
2789 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2791 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2792 tree cst = ipa_get_jf_constant (jfunc);
2793 if (TREE_CODE (cst) != ADDR_EXPR
2794 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2795 return NULL;
2797 return cgraph_node::get (TREE_OPERAND (cst, 0));
2801 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2802 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2803 the edge specified in the rdesc. Return false if either the symbol or the
2804 reference could not be found, otherwise return true. */
2806 static bool
2807 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2809 struct ipa_cst_ref_desc *rdesc;
2810 if (jfunc->type == IPA_JF_CONST
2811 && (rdesc = jfunc_rdesc_usable (jfunc))
2812 && --rdesc->refcount == 0)
2814 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2815 if (!symbol)
2816 return false;
2818 return remove_described_reference (symbol, rdesc);
2820 return true;
2823 /* Try to find a destination for indirect edge IE that corresponds to a simple
2824 call or a call of a member function pointer and where the destination is a
2825 pointer formal parameter described by jump function JFUNC. If it can be
2826 determined, return the newly direct edge, otherwise return NULL.
2827 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2829 static struct cgraph_edge *
2830 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2831 struct ipa_jump_func *jfunc,
2832 struct ipa_node_params *new_root_info)
2834 struct cgraph_edge *cs;
2835 tree target;
2836 bool agg_contents = ie->indirect_info->agg_contents;
2838 if (ie->indirect_info->agg_contents)
2839 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2840 ie->indirect_info->offset,
2841 ie->indirect_info->by_ref);
2842 else
2843 target = ipa_value_from_jfunc (new_root_info, jfunc);
2844 if (!target)
2845 return NULL;
2846 cs = ipa_make_edge_direct_to_target (ie, target);
2848 if (cs && !agg_contents)
2850 bool ok;
2851 gcc_checking_assert (cs->callee
2852 && (cs != ie
2853 || jfunc->type != IPA_JF_CONST
2854 || !cgraph_node_for_jfunc (jfunc)
2855 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2856 ok = try_decrement_rdesc_refcount (jfunc);
2857 gcc_checking_assert (ok);
2860 return cs;
2863 /* Return the target to be used in cases of impossible devirtualization. IE
2864 and target (the latter can be NULL) are dumped when dumping is enabled. */
2866 tree
2867 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2869 if (dump_file)
2871 if (target)
2872 fprintf (dump_file,
2873 "Type inconsistent devirtualization: %s/%i->%s\n",
2874 ie->caller->name (), ie->caller->order,
2875 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2876 else
2877 fprintf (dump_file,
2878 "No devirtualization target in %s/%i\n",
2879 ie->caller->name (), ie->caller->order);
2881 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2882 cgraph_node::get_create (new_target);
2883 return new_target;
2886 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2887 call based on a formal parameter which is described by jump function JFUNC,
2888 and if it can be determined, make it direct and return the direct edge.
2889 Otherwise, return NULL. CTX describes the polymorphic context brought
2890 along by the parameter on which the call is based. */
2892 static struct cgraph_edge *
2893 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2894 struct ipa_jump_func *jfunc,
2895 struct ipa_polymorphic_call_context ctx)
2897 tree target = NULL;
2898 bool speculative = false;
2900 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2901 return NULL;
2903 gcc_assert (!ie->indirect_info->by_ref);
2905 /* Try to do lookup via known virtual table pointer value. */
2906 if (!ie->indirect_info->vptr_changed
2907 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2909 tree vtable;
2910 unsigned HOST_WIDE_INT offset;
2911 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2912 ie->indirect_info->offset,
2913 true);
2914 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2916 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2917 vtable, offset);
2918 if (t)
2920 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2921 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2922 || !possible_polymorphic_call_target_p
2923 (ie, cgraph_node::get (t)))
2925 /* Do not speculate builtin_unreachable, it is stupid! */
2926 if (!ie->indirect_info->vptr_changed)
2927 target = ipa_impossible_devirt_target (ie, target);
2929 else
2931 target = t;
2932 speculative = ie->indirect_info->vptr_changed;
2938 ipa_polymorphic_call_context ie_context (ie);
2939 vec <cgraph_node *>targets;
2940 bool final;
2942 ctx.offset_by (ie->indirect_info->offset);
2943 if (ie->indirect_info->vptr_changed)
2944 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
2945 ie->indirect_info->otr_type);
2946 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
2947 targets = possible_polymorphic_call_targets
2948 (ie->indirect_info->otr_type,
2949 ie->indirect_info->otr_token,
2950 ctx, &final);
2951 if (final && targets.length () <= 1)
2953 if (targets.length () == 1)
2954 target = targets[0]->decl;
2955 else
2956 target = ipa_impossible_devirt_target (ie, NULL_TREE);
2958 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
2959 && !ie->speculative && ie->maybe_hot_p ())
2961 cgraph_node *n;
2962 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
2963 ie->indirect_info->otr_token,
2964 ie->indirect_info->context);
2965 if (n)
2967 target = n->decl;
2968 speculative = true;
2972 if (target)
2974 if (!possible_polymorphic_call_target_p
2975 (ie, cgraph_node::get_create (target)))
2977 if (speculative)
2978 return NULL;
2979 target = ipa_impossible_devirt_target (ie, target);
2981 return ipa_make_edge_direct_to_target (ie, target, speculative);
2983 else
2984 return NULL;
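
/* An example of the vtable-based lookup above:

     struct A { virtual int f () { return 1; } };
     extern int call_f (A *a);

     int foo (void)
     {
       A a;
       return call_f (&a);
     }

   where call_f contains the indirect call a->f ().  When the aggregate
   part of the jump function records the virtual table pointer stored
   into a before the call, the lookup through the vtable can yield A::f
   and the indirect edge in call_f is made direct, or speculatively
   direct if the virtual table pointer might have changed in between.  */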
2987 /* Update the param called notes associated with NODE when CS is being inlined,
2988 assuming NODE is (potentially indirectly) inlined into CS->callee.
2989 Moreover, if the callee is discovered to be constant, create a new cgraph
2990 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
2991 unless NEW_EDGES is NULL. Return true iff new edges were created. */
2993 static bool
2994 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2995 struct cgraph_node *node,
2996 vec<cgraph_edge *> *new_edges)
2998 struct ipa_edge_args *top;
2999 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3000 struct ipa_node_params *new_root_info;
3001 bool res = false;
3003 ipa_check_create_edge_args ();
3004 top = IPA_EDGE_REF (cs);
3005 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3006 ? cs->caller->global.inlined_to
3007 : cs->caller);
3009 for (ie = node->indirect_calls; ie; ie = next_ie)
3011 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3012 struct ipa_jump_func *jfunc;
3013 int param_index;
3015 next_ie = ie->next_callee;
3017 if (ici->param_index == -1)
3018 continue;
3020 /* We must check the range due to calls with a variable number of arguments: */
3021 if (ici->param_index >= ipa_get_cs_argument_count (top))
3023 ici->param_index = -1;
3024 continue;
3027 param_index = ici->param_index;
3028 jfunc = ipa_get_ith_jump_func (top, param_index);
3030 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3031 new_direct_edge = NULL;
3032 else if (ici->polymorphic)
3034 ipa_polymorphic_call_context ctx;
3035 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3036 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3038 else
3039 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3040 new_root_info);
3041 /* If speculation was removed, then we need to do nothing. */
3042 if (new_direct_edge && new_direct_edge != ie)
3044 new_direct_edge->indirect_inlining_edge = 1;
3045 top = IPA_EDGE_REF (cs);
3046 res = true;
3048 else if (new_direct_edge)
3050 new_direct_edge->indirect_inlining_edge = 1;
3051 if (new_direct_edge->call_stmt)
3052 new_direct_edge->call_stmt_cannot_inline_p
3053 = !gimple_check_call_matching_types (
3054 new_direct_edge->call_stmt,
3055 new_direct_edge->callee->decl, false);
3056 if (new_edges)
3058 new_edges->safe_push (new_direct_edge);
3059 res = true;
3061 top = IPA_EDGE_REF (cs);
3063 else if (jfunc->type == IPA_JF_PASS_THROUGH
3064 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3066 if ((ici->agg_contents
3067 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
3068 || (ici->polymorphic
3069 && !ipa_get_jf_pass_through_type_preserved (jfunc)))
3070 ici->param_index = -1;
3071 else
3072 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3074 else if (jfunc->type == IPA_JF_ANCESTOR)
3076 if ((ici->agg_contents
3077 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
3078 || (ici->polymorphic
3079 && !ipa_get_jf_ancestor_type_preserved (jfunc)))
3080 ici->param_index = -1;
3081 else
3083 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3084 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3087 else
3088 /* Either we can find a destination for this edge now or never. */
3089 ici->param_index = -1;
3092 return res;
3095 /* Recursively traverse subtree of NODE (including node) made of inlined
3096 cgraph_edges when CS has been inlined and invoke
3097 update_indirect_edges_after_inlining on all nodes and
3098 update_jump_functions_after_inlining on all non-inlined edges that lead out
3099 of this subtree. Newly discovered indirect edges will be added to
3100 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3101 created. */
3103 static bool
3104 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3105 struct cgraph_node *node,
3106 vec<cgraph_edge *> *new_edges)
3108 struct cgraph_edge *e;
3109 bool res;
3111 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3113 for (e = node->callees; e; e = e->next_callee)
3114 if (!e->inline_failed)
3115 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3116 else
3117 update_jump_functions_after_inlining (cs, e);
3118 for (e = node->indirect_calls; e; e = e->next_callee)
3119 update_jump_functions_after_inlining (cs, e);
3121 return res;
3124 /* Combine two controlled uses counts as done during inlining. */
3126 static int
3127 combine_controlled_uses_counters (int c, int d)
3129 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3130 return IPA_UNDESCRIBED_USE;
3131 else
3132 return c + d - 1;
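
/* The minus one accounts for the use that passed the value to the
   now-inlined call itself: that use disappears with the call and is
   replaced by the D uses inside the callee.  For example, with c == 1
   (the argument of the inlined call being the only described use in the
   caller) and d == 2, the combined body has 1 + 2 - 1 == 2 controlled
   uses.  */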
3135 /* Propagate the number of controlled users from CS->callee to the new root
3136 of the tree of inlined nodes. */
3138 static void
3139 propagate_controlled_uses (struct cgraph_edge *cs)
3141 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3142 struct cgraph_node *new_root = cs->caller->global.inlined_to
3143 ? cs->caller->global.inlined_to : cs->caller;
3144 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3145 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3146 int count, i;
3148 count = MIN (ipa_get_cs_argument_count (args),
3149 ipa_get_param_count (old_root_info));
3150 for (i = 0; i < count; i++)
3152 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3153 struct ipa_cst_ref_desc *rdesc;
3155 if (jf->type == IPA_JF_PASS_THROUGH)
3157 int src_idx, c, d;
3158 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3159 c = ipa_get_controlled_uses (new_root_info, src_idx);
3160 d = ipa_get_controlled_uses (old_root_info, i);
3162 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3163 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3164 c = combine_controlled_uses_counters (c, d);
3165 ipa_set_controlled_uses (new_root_info, src_idx, c);
3166 if (c == 0 && new_root_info->ipcp_orig_node)
3168 struct cgraph_node *n;
3169 struct ipa_ref *ref;
3170 tree t = new_root_info->known_csts[src_idx];
3172 if (t && TREE_CODE (t) == ADDR_EXPR
3173 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3174 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3175 && (ref = new_root->find_reference (n, NULL, 0)))
3177 if (dump_file)
3178 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3179 "reference from %s/%i to %s/%i.\n",
3180 xstrdup (new_root->name ()),
3181 new_root->order,
3182 xstrdup (n->name ()), n->order);
3183 ref->remove_reference ();
3187 else if (jf->type == IPA_JF_CONST
3188 && (rdesc = jfunc_rdesc_usable (jf)))
3190 int d = ipa_get_controlled_uses (old_root_info, i);
3191 int c = rdesc->refcount;
3192 rdesc->refcount = combine_controlled_uses_counters (c, d);
3193 if (rdesc->refcount == 0)
3195 tree cst = ipa_get_jf_constant (jf);
3196 struct cgraph_node *n;
3197 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3198 && TREE_CODE (TREE_OPERAND (cst, 0))
3199 == FUNCTION_DECL);
3200 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3201 if (n)
3203 struct cgraph_node *clone;
3204 bool ok;
3205 ok = remove_described_reference (n, rdesc);
3206 gcc_checking_assert (ok);
3208 clone = cs->caller;
3209 while (clone->global.inlined_to
3210 && clone != rdesc->cs->caller
3211 && IPA_NODE_REF (clone)->ipcp_orig_node)
3213 struct ipa_ref *ref;
3214 ref = clone->find_reference (n, NULL, 0);
3215 if (ref)
3217 if (dump_file)
3218 fprintf (dump_file, "ipa-prop: Removing "
3219 "cloning-created reference "
3220 "from %s/%i to %s/%i.\n",
3221 xstrdup (clone->name ()),
3222 clone->order,
3223 xstrdup (n->name ()),
3224 n->order);
3225 ref->remove_reference ();
3227 clone = clone->callers->caller;
3234 for (i = ipa_get_param_count (old_root_info);
3235 i < ipa_get_cs_argument_count (args);
3236 i++)
3238 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3240 if (jf->type == IPA_JF_CONST)
3242 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3243 if (rdesc)
3244 rdesc->refcount = IPA_UNDESCRIBED_USE;
3246 else if (jf->type == IPA_JF_PASS_THROUGH)
3247 ipa_set_controlled_uses (new_root_info,
3248 jf->value.pass_through.formal_id,
3249 IPA_UNDESCRIBED_USE);
3253 /* Update jump functions and call note functions on inlining the call site CS.
3254 CS is expected to lead to a node already cloned by
3255 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3256 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3257 created. */
3259 bool
3260 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3261 vec<cgraph_edge *> *new_edges)
3263 bool changed;
3264 /* Do nothing if the preparation phase has not been carried out yet
3265 (i.e. during early inlining). */
3266 if (!ipa_node_params_vector.exists ())
3267 return false;
3268 gcc_assert (ipa_edge_args_vector);
3270 propagate_controlled_uses (cs);
3271 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3273 return changed;
3276 /* Frees all dynamically allocated structures that the argument info points
3277 to. */
3279 void
3280 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3282 vec_free (args->jump_functions);
3283 memset (args, 0, sizeof (*args));
3286 /* Free all ipa_edge structures. */
3288 void
3289 ipa_free_all_edge_args (void)
3291 int i;
3292 struct ipa_edge_args *args;
3294 if (!ipa_edge_args_vector)
3295 return;
3297 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3298 ipa_free_edge_args_substructures (args);
3300 vec_free (ipa_edge_args_vector);
3303 /* Frees all dynamically allocated structures that the param info points
3304 to. */
3306 void
3307 ipa_free_node_params_substructures (struct ipa_node_params *info)
3309 info->descriptors.release ();
3310 free (info->lattices);
3311 /* Lattice values and their sources are deallocated with their allocation
3312 pool. */
3313 info->known_csts.release ();
3314 info->known_contexts.release ();
3315 memset (info, 0, sizeof (*info));
3318 /* Free all ipa_node_params structures. */
3320 void
3321 ipa_free_all_node_params (void)
3323 int i;
3324 struct ipa_node_params *info;
3326 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
3327 ipa_free_node_params_substructures (info);
3329 ipa_node_params_vector.release ();
3332 /* Set the aggregate replacements of NODE to be AGGVALS. */
3334 void
3335 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3336 struct ipa_agg_replacement_value *aggvals)
3338 if (vec_safe_length (ipa_node_agg_replacements)
3339 <= (unsigned) symtab->cgraph_max_uid)
3340 vec_safe_grow_cleared (ipa_node_agg_replacements,
3341 symtab->cgraph_max_uid + 1);
3343 (*ipa_node_agg_replacements)[node->uid] = aggvals;
3346 /* Hook that is called by cgraph.c when an edge is removed. */
3348 static void
3349 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3351 struct ipa_edge_args *args;
3353 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3354 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3355 return;
3357 args = IPA_EDGE_REF (cs);
3358 if (args->jump_functions)
3360 struct ipa_jump_func *jf;
3361 int i;
3362 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3364 struct ipa_cst_ref_desc *rdesc;
3365 try_decrement_rdesc_refcount (jf);
3366 if (jf->type == IPA_JF_CONST
3367 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3368 && rdesc->cs == cs)
3369 rdesc->cs = NULL;
3373 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3376 /* Hook that is called by cgraph.c when a node is removed. */
3378 static void
3379 ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3381 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3382 if (ipa_node_params_vector.length () > (unsigned)node->uid)
3383 ipa_free_node_params_substructures (IPA_NODE_REF (node));
3384 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3385 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
3388 /* Hook that is called by cgraph.c when an edge is duplicated. */
3390 static void
3391 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3392 __attribute__((unused)) void *data)
3394 struct ipa_edge_args *old_args, *new_args;
3395 unsigned int i;
3397 ipa_check_create_edge_args ();
3399 old_args = IPA_EDGE_REF (src);
3400 new_args = IPA_EDGE_REF (dst);
3402 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3403 if (old_args->polymorphic_call_contexts)
3404 new_args->polymorphic_call_contexts
3405 = vec_safe_copy (old_args->polymorphic_call_contexts);
3407 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3409 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3410 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3412 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3414 if (src_jf->type == IPA_JF_CONST)
3416 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3418 if (!src_rdesc)
3419 dst_jf->value.constant.rdesc = NULL;
3420 else if (src->caller == dst->caller)
3422 struct ipa_ref *ref;
3423 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3424 gcc_checking_assert (n);
3425 ref = src->caller->find_reference (n, src->call_stmt,
3426 src->lto_stmt_uid);
3427 gcc_checking_assert (ref);
3428 dst->caller->clone_reference (ref, ref->stmt);
3430 gcc_checking_assert (ipa_refdesc_pool);
3431 struct ipa_cst_ref_desc *dst_rdesc
3432 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3433 dst_rdesc->cs = dst;
3434 dst_rdesc->refcount = src_rdesc->refcount;
3435 dst_rdesc->next_duplicate = NULL;
3436 dst_jf->value.constant.rdesc = dst_rdesc;
3438 else if (src_rdesc->cs == src)
3440 struct ipa_cst_ref_desc *dst_rdesc;
3441 gcc_checking_assert (ipa_refdesc_pool);
3442 dst_rdesc
3443 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3444 dst_rdesc->cs = dst;
3445 dst_rdesc->refcount = src_rdesc->refcount;
3446 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3447 src_rdesc->next_duplicate = dst_rdesc;
3448 dst_jf->value.constant.rdesc = dst_rdesc;
3450 else
3452 struct ipa_cst_ref_desc *dst_rdesc;
3453 /* This can happen during inlining, when a JFUNC can refer to a
3454 reference taken in a function up in the tree of inline clones.
3455 We need to find the duplicate that refers to our tree of
3456 inline clones. */
3458 gcc_assert (dst->caller->global.inlined_to);
3459 for (dst_rdesc = src_rdesc->next_duplicate;
3460 dst_rdesc;
3461 dst_rdesc = dst_rdesc->next_duplicate)
3463 struct cgraph_node *top;
3464 top = dst_rdesc->cs->caller->global.inlined_to
3465 ? dst_rdesc->cs->caller->global.inlined_to
3466 : dst_rdesc->cs->caller;
3467 if (dst->caller->global.inlined_to == top)
3468 break;
3470 gcc_assert (dst_rdesc);
3471 dst_jf->value.constant.rdesc = dst_rdesc;
3474 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3475 && src->caller == dst->caller)
3477 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3478 ? dst->caller->global.inlined_to : dst->caller;
3479 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3480 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3482 int c = ipa_get_controlled_uses (root_info, idx);
3483 if (c != IPA_UNDESCRIBED_USE)
3485 c++;
3486 ipa_set_controlled_uses (root_info, idx, c);
3492 /* Hook that is called by cgraph.c when a node is duplicated. */
3494 static void
3495 ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
3496 ATTRIBUTE_UNUSED void *data)
3498 struct ipa_node_params *old_info, *new_info;
3499 struct ipa_agg_replacement_value *old_av, *new_av;
3501 ipa_check_create_node_params ();
3502 old_info = IPA_NODE_REF (src);
3503 new_info = IPA_NODE_REF (dst);
3505 new_info->descriptors = old_info->descriptors.copy ();
3506 new_info->lattices = NULL;
3507 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3509 new_info->analysis_done = old_info->analysis_done;
3510 new_info->node_enqueued = old_info->node_enqueued;
3512 old_av = ipa_get_agg_replacements_for_node (src);
3513 if (!old_av)
3514 return;
3516 new_av = NULL;
3517 while (old_av)
3519 struct ipa_agg_replacement_value *v;
3521 v = ggc_alloc<ipa_agg_replacement_value> ();
3522 memcpy (v, old_av, sizeof (*v));
3523 v->next = new_av;
3524 new_av = v;
3525 old_av = old_av->next;
3527 ipa_set_node_agg_value_chain (dst, new_av);
3531 /* Analyze a newly added function in the callgraph. */
3533 static void
3534 ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3536 if (node->has_gimple_body_p ())
3537 ipa_analyze_node (node);
3540 /* Register our cgraph hooks if they are not already there. */
3542 void
3543 ipa_register_cgraph_hooks (void)
3545 if (!edge_removal_hook_holder)
3546 edge_removal_hook_holder =
3547 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3548 if (!node_removal_hook_holder)
3549 node_removal_hook_holder =
3550 symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
3551 if (!edge_duplication_hook_holder)
3552 edge_duplication_hook_holder =
3553 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3554 if (!node_duplication_hook_holder)
3555 node_duplication_hook_holder =
3556 symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
3557 function_insertion_hook_holder =
3558 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3561 /* Unregister our cgraph hooks. */
3563 static void
3564 ipa_unregister_cgraph_hooks (void)
3566 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3567 edge_removal_hook_holder = NULL;
3568 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
3569 node_removal_hook_holder = NULL;
3570 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3571 edge_duplication_hook_holder = NULL;
3572 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
3573 node_duplication_hook_holder = NULL;
3574 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3575 function_insertion_hook_holder = NULL;
3578 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3579 longer needed after ipa-cp. */
3581 void
3582 ipa_free_all_structures_after_ipa_cp (void)
3584 if (!optimize && !in_lto_p)
3586 ipa_free_all_edge_args ();
3587 ipa_free_all_node_params ();
3588 free_alloc_pool (ipcp_sources_pool);
3589 free_alloc_pool (ipcp_cst_values_pool);
3590 free_alloc_pool (ipcp_poly_ctx_values_pool);
3591 free_alloc_pool (ipcp_agg_lattice_pool);
3592 ipa_unregister_cgraph_hooks ();
3593 if (ipa_refdesc_pool)
3594 free_alloc_pool (ipa_refdesc_pool);
3598 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3599 longer needed after indirect inlining. */
3601 void
3602 ipa_free_all_structures_after_iinln (void)
3604 ipa_free_all_edge_args ();
3605 ipa_free_all_node_params ();
3606 ipa_unregister_cgraph_hooks ();
3607 if (ipcp_sources_pool)
3608 free_alloc_pool (ipcp_sources_pool);
3609 if (ipcp_cst_values_pool)
3610 free_alloc_pool (ipcp_cst_values_pool);
3611 if (ipcp_poly_ctx_values_pool)
3612 free_alloc_pool (ipcp_poly_ctx_values_pool);
3613 if (ipcp_agg_lattice_pool)
3614 free_alloc_pool (ipcp_agg_lattice_pool);
3615 if (ipa_refdesc_pool)
3616 free_alloc_pool (ipa_refdesc_pool);
3619 /* Print the ipa_tree_map data structure of function NODE to F. */
3622 void
3623 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3625 int i, count;
3626 struct ipa_node_params *info;
3628 if (!node->definition)
3629 return;
3630 info = IPA_NODE_REF (node);
3631 fprintf (f, " function %s/%i parameter descriptors:\n",
3632 node->name (), node->order);
3633 count = ipa_get_param_count (info);
3634 for (i = 0; i < count; i++)
3636 int c;
3638 fprintf (f, " ");
3639 ipa_dump_param (f, info, i);
3640 if (ipa_is_param_used (info, i))
3641 fprintf (f, " used");
3642 c = ipa_get_controlled_uses (info, i);
3643 if (c == IPA_UNDESCRIBED_USE)
3644 fprintf (f, " undescribed_use");
3645 else
3646 fprintf (f, " controlled_uses=%i", c);
3647 fprintf (f, "\n");
3651 /* Print ipa_tree_map data structures of all functions in the
3652 callgraph to F. */
3654 void
3655 ipa_print_all_params (FILE * f)
3657 struct cgraph_node *node;
3659 fprintf (f, "\nFunction parameters:\n");
3660 FOR_EACH_FUNCTION (node)
3661 ipa_print_node_params (f, node);
3664 /* Return a heap allocated vector containing formal parameters of FNDECL. */
3666 vec<tree>
3667 ipa_get_vector_of_formal_parms (tree fndecl)
3669 vec<tree> args;
3670 int count;
3671 tree parm;
3673 gcc_assert (!flag_wpa);
3674 count = count_formal_params (fndecl);
3675 args.create (count);
3676 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3677 args.quick_push (parm);
3679 return args;
3682 /* Return a heap allocated vector containing types of formal parameters of
3683 function type FNTYPE. */
3685 vec<tree>
3686 ipa_get_vector_of_formal_parm_types (tree fntype)
3688 vec<tree> types;
3689 int count = 0;
3690 tree t;
3692 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3693 count++;
3695 types.create (count);
3696 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3697 types.quick_push (TREE_VALUE (t));
3699 return types;
3702 /* Modify the function declaration FNDECL and its type according to the plan in
3703 ADJUSTMENTS. It also sets the base fields of individual adjustment structures
3704 to reflect the actual parameters being modified, which are determined by the
3705 base_index field. */
3707 void
3708 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3710 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3711 tree orig_type = TREE_TYPE (fndecl);
3712 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3714 /* The following test is an ugly hack; some functions simply don't have any
3715 arguments in their type. This is probably a bug but well... */
3716 bool care_for_types = (old_arg_types != NULL_TREE);
3717 bool last_parm_void;
3718 vec<tree> otypes;
3719 if (care_for_types)
3721 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3722 == void_type_node);
3723 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3724 if (last_parm_void)
3725 gcc_assert (oparms.length () + 1 == otypes.length ());
3726 else
3727 gcc_assert (oparms.length () == otypes.length ());
3729 else
3731 last_parm_void = false;
3732 otypes.create (0);
3735 int len = adjustments.length ();
3736 tree *link = &DECL_ARGUMENTS (fndecl);
3737 tree new_arg_types = NULL;
3738 for (int i = 0; i < len; i++)
3740 struct ipa_parm_adjustment *adj;
3741 gcc_assert (link);
3743 adj = &adjustments[i];
3744 tree parm;
3745 if (adj->op == IPA_PARM_OP_NEW)
3746 parm = NULL;
3747 else
3748 parm = oparms[adj->base_index];
3749 adj->base = parm;
3751 if (adj->op == IPA_PARM_OP_COPY)
3753 if (care_for_types)
3754 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3755 new_arg_types);
3756 *link = parm;
3757 link = &DECL_CHAIN (parm);
3759 else if (adj->op != IPA_PARM_OP_REMOVE)
3761 tree new_parm;
3762 tree ptype;
3764 if (adj->by_ref)
3765 ptype = build_pointer_type (adj->type);
3766 else
3768 ptype = adj->type;
3769 if (is_gimple_reg_type (ptype))
3771 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3772 if (TYPE_ALIGN (ptype) < malign)
3773 ptype = build_aligned_type (ptype, malign);
3777 if (care_for_types)
3778 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3780 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3781 ptype);
3782 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3783 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3784 DECL_ARTIFICIAL (new_parm) = 1;
3785 DECL_ARG_TYPE (new_parm) = ptype;
3786 DECL_CONTEXT (new_parm) = fndecl;
3787 TREE_USED (new_parm) = 1;
3788 DECL_IGNORED_P (new_parm) = 1;
3789 layout_decl (new_parm, 0);
3791 if (adj->op == IPA_PARM_OP_NEW)
3792 adj->base = NULL;
3793 else
3794 adj->base = parm;
3795 adj->new_decl = new_parm;
3797 *link = new_parm;
3798 link = &DECL_CHAIN (new_parm);
3802 *link = NULL_TREE;
3804 tree new_reversed = NULL;
3805 if (care_for_types)
3807 new_reversed = nreverse (new_arg_types);
3808 if (last_parm_void)
3810 if (new_reversed)
3811 TREE_CHAIN (new_arg_types) = void_list_node;
3812 else
3813 new_reversed = void_list_node;
3817 /* Use build_distinct_type_copy to preserve as much as possible from the
3818 original type (debug info, attribute lists etc.).
3819 The exception is that METHOD_TYPEs must have a THIS argument; when
3820 we are asked to remove it, we need to build a new FUNCTION_TYPE
3821 instead. */
3822 tree new_type = NULL;
3823 if (TREE_CODE (orig_type) != METHOD_TYPE
3824 || (adjustments[0].op == IPA_PARM_OP_COPY
3825 && adjustments[0].base_index == 0))
3827 new_type = build_distinct_type_copy (orig_type);
3828 TYPE_ARG_TYPES (new_type) = new_reversed;
3830 else
3832 new_type
3833 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3834 new_reversed));
3835 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3836 DECL_VINDEX (fndecl) = NULL_TREE;
3839 /* When the signature changes, we need to clear builtin info. */
3840 if (DECL_BUILT_IN (fndecl))
3842 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3843 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3846 TREE_TYPE (fndecl) = new_type;
3847 DECL_VIRTUAL_P (fndecl) = 0;
3848 DECL_LANG_SPECIFIC (fndecl) = NULL;
3849 otypes.release ();
3850 oparms.release ();
3853 /* Modify actual arguments of the function call STMT as indicated in
3854 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL;
3855 otherwise it must contain the corresponding call graph edge. */
3857 void
3858 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3859 ipa_parm_adjustment_vec adjustments)
3861 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3862 vec<tree> vargs;
3863 vec<tree, va_gc> **debug_args = NULL;
3864 gcall *new_stmt;
3865 gimple_stmt_iterator gsi, prev_gsi;
3866 tree callee_decl;
3867 int i, len;
3869 len = adjustments.length ();
3870 vargs.create (len);
3871 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3872 current_node->remove_stmt_references (stmt);
3874 gsi = gsi_for_stmt (stmt);
3875 prev_gsi = gsi;
3876 gsi_prev (&prev_gsi);
3877 for (i = 0; i < len; i++)
3879 struct ipa_parm_adjustment *adj;
3881 adj = &adjustments[i];
3883 if (adj->op == IPA_PARM_OP_COPY)
3885 tree arg = gimple_call_arg (stmt, adj->base_index);
3887 vargs.quick_push (arg);
3889 else if (adj->op != IPA_PARM_OP_REMOVE)
3891 tree expr, base, off;
3892 location_t loc;
3893 unsigned int deref_align = 0;
3894 bool deref_base = false;
3896 /* Since we create a new parameter out of the value of the old one, we can
3897 do the following kinds of transformations:
3899 - A scalar passed by reference is converted to a scalar passed by
3900 value. (adj->by_ref is false and the type of the original
3901 actual argument is a pointer to a scalar).
3903 - A part of an aggregate is passed instead of the whole aggregate.
3904 The part can be passed either by value or by reference; this is
3905 determined by the value of adj->by_ref. Moreover, the code below
3906 handles both situations when the original aggregate is passed by
3907 value (its type is not a pointer) and when it is passed by
3908 reference (it is a pointer to an aggregate).
3910 When the new argument is passed by reference (adj->by_ref is true)
3911 it must be a part of an aggregate and therefore we form it by
3912 simply taking the address of a reference inside the original
3913 aggregate. */
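/* For instance (an illustrative sketch with made-up names): given

     struct S { int a; float f; };  ...  foo (&s);

   an adjustment selecting the float at byte offset 4 with by_ref false
   rewrites the call to pass the piece by value,

     foo (MEM[(float *) &s + 4B]);

   while with by_ref true the call would pass &s.f instead. */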
3915 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3916 base = gimple_call_arg (stmt, adj->base_index);
3917 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3918 : EXPR_LOCATION (base);
3920 if (TREE_CODE (base) != ADDR_EXPR
3921 && POINTER_TYPE_P (TREE_TYPE (base)))
3922 off = build_int_cst (adj->alias_ptr_type,
3923 adj->offset / BITS_PER_UNIT);
3924 else
3926 HOST_WIDE_INT base_offset;
3927 tree prev_base;
3928 bool addrof;
3930 if (TREE_CODE (base) == ADDR_EXPR)
3932 base = TREE_OPERAND (base, 0);
3933 addrof = true;
3935 else
3936 addrof = false;
3937 prev_base = base;
3938 base = get_addr_base_and_unit_offset (base, &base_offset);
3939 /* Aggregate arguments can have non-invariant addresses. */
3940 if (!base)
3942 base = build_fold_addr_expr (prev_base);
3943 off = build_int_cst (adj->alias_ptr_type,
3944 adj->offset / BITS_PER_UNIT);
3946 else if (TREE_CODE (base) == MEM_REF)
3948 if (!addrof)
3950 deref_base = true;
3951 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3953 off = build_int_cst (adj->alias_ptr_type,
3954 base_offset
3955 + adj->offset / BITS_PER_UNIT);
3956 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
3957 off);
3958 base = TREE_OPERAND (base, 0);
3960 else
3962 off = build_int_cst (adj->alias_ptr_type,
3963 base_offset
3964 + adj->offset / BITS_PER_UNIT);
3965 base = build_fold_addr_expr (base);
3969 if (!adj->by_ref)
3971 tree type = adj->type;
3972 unsigned int align;
3973 unsigned HOST_WIDE_INT misalign;
3975 if (deref_base)
3977 align = deref_align;
3978 misalign = 0;
3980 else
3982 get_pointer_alignment_1 (base, &align, &misalign);
3983 if (TYPE_ALIGN (type) > align)
3984 align = TYPE_ALIGN (type);
3986 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
3987 * BITS_PER_UNIT);
3988 misalign = misalign & (align - 1);
3989 if (misalign != 0)
3990 align = (misalign & -misalign);
3991 if (align < TYPE_ALIGN (type))
3992 type = build_aligned_type (type, align);
3993 base = force_gimple_operand_gsi (&gsi, base,
3994 true, NULL, true, GSI_SAME_STMT);
3995 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3996 /* If expr is not a valid gimple call argument, emit
3997 a load into a temporary. */
3998 if (is_gimple_reg_type (TREE_TYPE (expr)))
4000 gimple tem = gimple_build_assign (NULL_TREE, expr);
4001 if (gimple_in_ssa_p (cfun))
4003 gimple_set_vuse (tem, gimple_vuse (stmt));
4004 expr = make_ssa_name (TREE_TYPE (expr), tem);
4006 else
4007 expr = create_tmp_reg (TREE_TYPE (expr), NULL);
4008 gimple_assign_set_lhs (tem, expr);
4009 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4012 else
4014 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4015 expr = build_fold_addr_expr (expr);
4016 expr = force_gimple_operand_gsi (&gsi, expr,
4017 true, NULL, true, GSI_SAME_STMT);
4019 vargs.quick_push (expr);
4021 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4023 unsigned int ix;
4024 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4025 gimple def_temp;
4027 arg = gimple_call_arg (stmt, adj->base_index);
4028 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4030 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4031 continue;
4032 arg = fold_convert_loc (gimple_location (stmt),
4033 TREE_TYPE (origin), arg);
4035 if (debug_args == NULL)
4036 debug_args = decl_debug_args_insert (callee_decl);
4037 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4038 if (ddecl == origin)
4040 ddecl = (**debug_args)[ix + 1];
4041 break;
4043 if (ddecl == NULL)
4045 ddecl = make_node (DEBUG_EXPR_DECL);
4046 DECL_ARTIFICIAL (ddecl) = 1;
4047 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4048 DECL_MODE (ddecl) = DECL_MODE (origin);
4050 vec_safe_push (*debug_args, origin);
4051 vec_safe_push (*debug_args, ddecl);
4053 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4054 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4058 if (dump_file && (dump_flags & TDF_DETAILS))
4060 fprintf (dump_file, "replacing stmt:");
4061 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4064 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4065 vargs.release ();
4066 if (gimple_call_lhs (stmt))
4067 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4069 gimple_set_block (new_stmt, gimple_block (stmt));
4070 if (gimple_has_location (stmt))
4071 gimple_set_location (new_stmt, gimple_location (stmt));
4072 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4073 gimple_call_copy_flags (new_stmt, stmt);
4074 if (gimple_in_ssa_p (cfun))
4076 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4077 if (gimple_vdef (stmt))
4079 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4080 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4084 if (dump_file && (dump_flags & TDF_DETAILS))
4086 fprintf (dump_file, "with stmt:");
4087 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4088 fprintf (dump_file, "\n");
4090 gsi_replace (&gsi, new_stmt, true);
4091 if (cs)
4092 cs->set_call_stmt (new_stmt);
4095 current_node->record_stmt_references (gsi_stmt (gsi));
4096 gsi_prev (&gsi);
4098 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4101 /* If the expression *EXPR should be replaced by a reduction of a parameter,
4102 do so. ADJUSTMENTS is a vector of adjustments. CONVERT
4103 specifies whether the function should care about type incompatibility between
4104 the current and new expressions. If it is false, the function will leave
4105 incompatibility issues to the caller. Return true iff the expression
4106 was modified. */
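/* A minimal sketch (hypothetical names): if a use of the old parameter,
   say p->fld, matches an adjustment whose new_decl is the synthesized
   scalar parameter SYNTH.3, then *EXPR is rewritten to SYNTH.3 (or to
   *SYNTH.3 when the adjustment is by_ref), possibly wrapped in a
   VIEW_CONVERT_EXPR when CONVERT is set and the types are not
   trivially compatible. */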
4108 bool
4109 ipa_modify_expr (tree *expr, bool convert,
4110 ipa_parm_adjustment_vec adjustments)
4112 struct ipa_parm_adjustment *cand
4113 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4114 if (!cand)
4115 return false;
4117 tree src;
4118 if (cand->by_ref)
4119 src = build_simple_mem_ref (cand->new_decl);
4120 else
4121 src = cand->new_decl;
4123 if (dump_file && (dump_flags & TDF_DETAILS))
4125 fprintf (dump_file, "About to replace expr ");
4126 print_generic_expr (dump_file, *expr, 0);
4127 fprintf (dump_file, " with ");
4128 print_generic_expr (dump_file, src, 0);
4129 fprintf (dump_file, "\n");
4132 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4134 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4135 *expr = vce;
4137 else
4138 *expr = src;
4139 return true;
4142 /* If T is an SSA_NAME, return NULL if it is not a default def or
4143 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4144 the base variable is always returned, regardless of whether it is a default
4145 def. Return T if it is not an SSA_NAME. */
4147 static tree
4148 get_ssa_base_param (tree t, bool ignore_default_def)
4150 if (TREE_CODE (t) == SSA_NAME)
4152 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4153 return SSA_NAME_VAR (t);
4154 else
4155 return NULL_TREE;
4157 return t;
4160 /* Given an expression, return the adjustment entry specifying the
4161 transformation to be done on EXPR. If no suitable adjustment entry
4162 is found, return NULL.
4164 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4165 default def; otherwise bail on them.
4167 If CONVERT is non-NULL, this function will set *CONVERT to true if the
4168 expression provided is a component reference. ADJUSTMENTS is the
4169 vector of adjustments. */
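/* An illustrative sketch (made-up names): for **EXPR of the form s.lo,
   where s is a PARM_DECL and lo sits at bit offset 0, the entry with
   base == s and offset == 0 is returned, provided it describes a real
   replacement rather than a plain copy or removal; BIT_FIELD_REFs,
   REALPART_EXPRs and IMAGPART_EXPRs are peeled off first, setting
   *CONVERT. */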
4171 ipa_parm_adjustment *
4172 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4173 ipa_parm_adjustment_vec adjustments,
4174 bool ignore_default_def)
4176 if (TREE_CODE (**expr) == BIT_FIELD_REF
4177 || TREE_CODE (**expr) == IMAGPART_EXPR
4178 || TREE_CODE (**expr) == REALPART_EXPR)
4180 *expr = &TREE_OPERAND (**expr, 0);
4181 if (convert)
4182 *convert = true;
4185 HOST_WIDE_INT offset, size, max_size;
4186 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4187 if (!base || size == -1 || max_size == -1)
4188 return NULL;
4190 if (TREE_CODE (base) == MEM_REF)
4192 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4193 base = TREE_OPERAND (base, 0);
4196 base = get_ssa_base_param (base, ignore_default_def);
4197 if (!base || TREE_CODE (base) != PARM_DECL)
4198 return NULL;
4200 struct ipa_parm_adjustment *cand = NULL;
4201 unsigned int len = adjustments.length ();
4202 for (unsigned i = 0; i < len; i++)
4204 struct ipa_parm_adjustment *adj = &adjustments[i];
4206 if (adj->base == base
4207 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4209 cand = adj;
4210 break;
4214 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4215 return NULL;
4216 return cand;
4219 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4221 static bool
4222 index_in_adjustments_multiple_times_p (int base_index,
4223 ipa_parm_adjustment_vec adjustments)
4225 int i, len = adjustments.length ();
4226 bool one = false;
4228 for (i = 0; i < len; i++)
4230 struct ipa_parm_adjustment *adj;
4231 adj = &adjustments[i];
4233 if (adj->base_index == base_index)
4235 if (one)
4236 return true;
4237 else
4238 one = true;
4241 return false;
4245 /* Return adjustments that should have the same effect on function parameters
4246 and call arguments as if they were first changed according to adjustments in
4247 INNER and then by adjustments in OUTER. */
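/* For example (an illustrative sketch): if INNER turns (a, b, c) into
   (a, c) by removing b, and OUTER then removes its second parameter,
   the combined adjustments turn (a, b, c) directly into (a): a copy of
   a, plus removals recorded for both original b and original c. */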
4249 ipa_parm_adjustment_vec
4250 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4251 ipa_parm_adjustment_vec outer)
4253 int i, outlen = outer.length ();
4254 int inlen = inner.length ();
4255 int removals = 0;
4256 ipa_parm_adjustment_vec adjustments, tmp;
4258 tmp.create (inlen);
4259 for (i = 0; i < inlen; i++)
4261 struct ipa_parm_adjustment *n;
4262 n = &inner[i];
4264 if (n->op == IPA_PARM_OP_REMOVE)
4265 removals++;
4266 else
4268 /* FIXME: Handling of new arguments is not implemented yet. */
4269 gcc_assert (n->op != IPA_PARM_OP_NEW);
4270 tmp.quick_push (*n);
4274 adjustments.create (outlen + removals);
4275 for (i = 0; i < outlen; i++)
4277 struct ipa_parm_adjustment r;
4278 struct ipa_parm_adjustment *out = &outer[i];
4279 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4281 memset (&r, 0, sizeof (r));
4282 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4283 if (out->op == IPA_PARM_OP_REMOVE)
4285 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4287 r.op = IPA_PARM_OP_REMOVE;
4288 adjustments.quick_push (r);
4290 continue;
4292 else
4294 /* FIXME: Handling of new arguments is not implemented yet. */
4295 gcc_assert (out->op != IPA_PARM_OP_NEW);
4298 r.base_index = in->base_index;
4299 r.type = out->type;
4301 /* FIXME: Create nonlocal value too. */
4303 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4304 r.op = IPA_PARM_OP_COPY;
4305 else if (in->op == IPA_PARM_OP_COPY)
4306 r.offset = out->offset;
4307 else if (out->op == IPA_PARM_OP_COPY)
4308 r.offset = in->offset;
4309 else
4310 r.offset = in->offset + out->offset;
4311 adjustments.quick_push (r);
4314 for (i = 0; i < inlen; i++)
4316 struct ipa_parm_adjustment *n = &inner[i];
4318 if (n->op == IPA_PARM_OP_REMOVE)
4319 adjustments.quick_push (*n);
4322 tmp.release ();
4323 return adjustments;
4326 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4327 human-friendly way, assuming they are meant to be applied to FNDECL. */
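/* The output has roughly the following shape (illustrative values only;
   types are printed with print_node_brief):

     IPA param adjustments: 0. base_index: 0 - p, base: p, copy_param, type: ...
        1. base_index: 1 - i, offset 0, by_ref, type: ...  */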
4329 void
4330 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4331 tree fndecl)
4333 int i, len = adjustments.length ();
4334 bool first = true;
4335 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4337 fprintf (file, "IPA param adjustments: ");
4338 for (i = 0; i < len; i++)
4340 struct ipa_parm_adjustment *adj;
4341 adj = &adjustments[i];
4343 if (!first)
4344 fprintf (file, " ");
4345 else
4346 first = false;
4348 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4349 print_generic_expr (file, parms[adj->base_index], 0);
4350 if (adj->base)
4352 fprintf (file, ", base: ");
4353 print_generic_expr (file, adj->base, 0);
4355 if (adj->new_decl)
4357 fprintf (file, ", new_decl: ");
4358 print_generic_expr (file, adj->new_decl, 0);
4360 if (adj->new_ssa_base)
4362 fprintf (file, ", new_ssa_base: ");
4363 print_generic_expr (file, adj->new_ssa_base, 0);
4366 if (adj->op == IPA_PARM_OP_COPY)
4367 fprintf (file, ", copy_param");
4368 else if (adj->op == IPA_PARM_OP_REMOVE)
4369 fprintf (file, ", remove_param");
4370 else
4371 fprintf (file, ", offset %li", (long) adj->offset);
4372 if (adj->by_ref)
4373 fprintf (file, ", by_ref");
4374 print_node_brief (file, ", type: ", adj->type, 0);
4375 fprintf (file, "\n");
4377 parms.release ();
4380 /* Dump the linked list of aggregate replacement values AV to file F. */
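/* The result is a single line such as (illustrative values):

     Aggregate replacements: 0[32]=1, 1[0]=2

   i.e. index[offset]=value for each replacement in the chain. */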
4382 void
4383 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4385 bool comma = false;
4386 fprintf (f, " Aggregate replacements:");
4387 for (; av; av = av->next)
4389 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4390 av->index, av->offset);
4391 print_generic_expr (f, av->value, 0);
4392 comma = true;
4394 fprintf (f, "\n");
4397 /* Stream out jump function JUMP_FUNC to OB. */
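/* The record emitted below is, roughly: a uhwi type tag; a type-specific
   payload (a tree for IPA_JF_CONST; operation, formal_id and a bitpacked
   agg_preserved flag for a NOP_EXPR pass-through, or operand and formal_id
   otherwise; offset, formal_id and agg_preserved for an ancestor); then
   the number of aggregate items, a bitpacked by_ref flag if the count is
   nonzero, and finally the offset/value pairs themselves. */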
4399 static void
4400 ipa_write_jump_function (struct output_block *ob,
4401 struct ipa_jump_func *jump_func)
4403 struct ipa_agg_jf_item *item;
4404 struct bitpack_d bp;
4405 int i, count;
4407 streamer_write_uhwi (ob, jump_func->type);
4408 switch (jump_func->type)
4410 case IPA_JF_UNKNOWN:
4411 break;
4412 case IPA_JF_CONST:
4413 gcc_assert (
4414 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4415 stream_write_tree (ob, jump_func->value.constant.value, true);
4416 break;
4417 case IPA_JF_PASS_THROUGH:
4418 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4419 if (jump_func->value.pass_through.operation == NOP_EXPR)
4421 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4422 bp = bitpack_create (ob->main_stream);
4423 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4424 streamer_write_bitpack (&bp);
4426 else
4428 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4429 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4431 break;
4432 case IPA_JF_ANCESTOR:
4433 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4434 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4435 bp = bitpack_create (ob->main_stream);
4436 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4437 streamer_write_bitpack (&bp);
4438 break;
4441 count = vec_safe_length (jump_func->agg.items);
4442 streamer_write_uhwi (ob, count);
4443 if (count)
4445 bp = bitpack_create (ob->main_stream);
4446 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4447 streamer_write_bitpack (&bp);
4450 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4452 streamer_write_uhwi (ob, item->offset);
4453 stream_write_tree (ob, item->value, true);
4457 /* Read in jump function JUMP_FUNC from IB. */
4459 static void
4460 ipa_read_jump_function (struct lto_input_block *ib,
4461 struct ipa_jump_func *jump_func,
4462 struct cgraph_edge *cs,
4463 struct data_in *data_in)
4465 enum jump_func_type jftype;
4466 enum tree_code operation;
4467 int i, count;
4469 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4470 switch (jftype)
4472 case IPA_JF_UNKNOWN:
4473 jump_func->type = IPA_JF_UNKNOWN;
4474 break;
4475 case IPA_JF_CONST:
4476 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4477 break;
4478 case IPA_JF_PASS_THROUGH:
4479 operation = (enum tree_code) streamer_read_uhwi (ib);
4480 if (operation == NOP_EXPR)
4482 int formal_id = streamer_read_uhwi (ib);
4483 struct bitpack_d bp = streamer_read_bitpack (ib);
4484 bool agg_preserved = bp_unpack_value (&bp, 1);
4485 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4487 else
4489 tree operand = stream_read_tree (ib, data_in);
4490 int formal_id = streamer_read_uhwi (ib);
4491 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4492 operation);
4494 break;
4495 case IPA_JF_ANCESTOR:
4497 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4498 int formal_id = streamer_read_uhwi (ib);
4499 struct bitpack_d bp = streamer_read_bitpack (ib);
4500 bool agg_preserved = bp_unpack_value (&bp, 1);
4501 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4502 break;
4506 count = streamer_read_uhwi (ib);
4507 vec_alloc (jump_func->agg.items, count);
4508 if (count)
4510 struct bitpack_d bp = streamer_read_bitpack (ib);
4511 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4513 for (i = 0; i < count; i++)
4515 struct ipa_agg_jf_item item;
4516 item.offset = streamer_read_uhwi (ib);
4517 item.value = stream_read_tree (ib, data_in);
4518 jump_func->agg.items->quick_push (item);
4522 /* Stream out to OB the parts of cgraph_indirect_call_info corresponding to CS
4523 that are relevant to indirect inlining. */
4525 static void
4526 ipa_write_indirect_edge_info (struct output_block *ob,
4527 struct cgraph_edge *cs)
4529 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4530 struct bitpack_d bp;
4532 streamer_write_hwi (ob, ii->param_index);
4533 bp = bitpack_create (ob->main_stream);
4534 bp_pack_value (&bp, ii->polymorphic, 1);
4535 bp_pack_value (&bp, ii->agg_contents, 1);
4536 bp_pack_value (&bp, ii->member_ptr, 1);
4537 bp_pack_value (&bp, ii->by_ref, 1);
4538 bp_pack_value (&bp, ii->vptr_changed, 1);
4539 streamer_write_bitpack (&bp);
4540 if (ii->agg_contents || ii->polymorphic)
4541 streamer_write_hwi (ob, ii->offset);
4542 else
4543 gcc_assert (ii->offset == 0);
4545 if (ii->polymorphic)
4547 streamer_write_hwi (ob, ii->otr_token);
4548 stream_write_tree (ob, ii->otr_type, true);
4549 ii->context.stream_out (ob);
4553 /* Read in from IB the parts of cgraph_indirect_call_info corresponding to CS
4554 that are relevant to indirect inlining. */
4556 static void
4557 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4558 struct data_in *data_in,
4559 struct cgraph_edge *cs)
4561 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4562 struct bitpack_d bp;
4564 ii->param_index = (int) streamer_read_hwi (ib);
4565 bp = streamer_read_bitpack (ib);
4566 ii->polymorphic = bp_unpack_value (&bp, 1);
4567 ii->agg_contents = bp_unpack_value (&bp, 1);
4568 ii->member_ptr = bp_unpack_value (&bp, 1);
4569 ii->by_ref = bp_unpack_value (&bp, 1);
4570 ii->vptr_changed = bp_unpack_value (&bp, 1);
4571 if (ii->agg_contents || ii->polymorphic)
4572 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4573 else
4574 ii->offset = 0;
4575 if (ii->polymorphic)
4577 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4578 ii->otr_type = stream_read_tree (ib, data_in);
4579 ii->context.stream_in (ib, data_in);
4583 /* Stream out NODE info to OB. */
4585 static void
4586 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4588 int node_ref;
4589 lto_symtab_encoder_t encoder;
4590 struct ipa_node_params *info = IPA_NODE_REF (node);
4591 int j;
4592 struct cgraph_edge *e;
4593 struct bitpack_d bp;
4595 encoder = ob->decl_state->symtab_node_encoder;
4596 node_ref = lto_symtab_encoder_encode (encoder, node);
4597 streamer_write_uhwi (ob, node_ref);
4599 streamer_write_uhwi (ob, ipa_get_param_count (info));
4600 for (j = 0; j < ipa_get_param_count (info); j++)
4601 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4602 bp = bitpack_create (ob->main_stream);
4603 gcc_assert (info->analysis_done
4604 || ipa_get_param_count (info) == 0);
4605 gcc_assert (!info->node_enqueued);
4606 gcc_assert (!info->ipcp_orig_node);
4607 for (j = 0; j < ipa_get_param_count (info); j++)
4608 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4609 streamer_write_bitpack (&bp);
4610 for (j = 0; j < ipa_get_param_count (info); j++)
4611 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4612 for (e = node->callees; e; e = e->next_callee)
4614 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4616 streamer_write_uhwi (ob,
4617 ipa_get_cs_argument_count (args) * 2
4618 + (args->polymorphic_call_contexts != NULL));
4619 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4621 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4622 if (args->polymorphic_call_contexts != NULL)
4623 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4626 for (e = node->indirect_calls; e; e = e->next_callee)
4628 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4630 streamer_write_uhwi (ob,
4631 ipa_get_cs_argument_count (args) * 2
4632 + (args->polymorphic_call_contexts != NULL));
4633 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4635 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4636 if (args->polymorphic_call_contexts != NULL)
4637 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4639 ipa_write_indirect_edge_info (ob, e);
4643 /* Stream in NODE info from IB. */
4645 static void
4646 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4647 struct data_in *data_in)
4649 struct ipa_node_params *info = IPA_NODE_REF (node);
4650 int k;
4651 struct cgraph_edge *e;
4652 struct bitpack_d bp;
4654 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4656 for (k = 0; k < ipa_get_param_count (info); k++)
4657 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4659 bp = streamer_read_bitpack (ib);
4660 if (ipa_get_param_count (info) != 0)
4661 info->analysis_done = true;
4662 info->node_enqueued = false;
4663 for (k = 0; k < ipa_get_param_count (info); k++)
4664 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4665 for (k = 0; k < ipa_get_param_count (info); k++)
4666 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4667 for (e = node->callees; e; e = e->next_callee)
4669 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4670 int count = streamer_read_uhwi (ib);
4671 bool contexts_computed = count & 1;
4672 count /= 2;
4674 if (!count)
4675 continue;
4676 vec_safe_grow_cleared (args->jump_functions, count);
4677 if (contexts_computed)
4678 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4680 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4682 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4683 data_in);
4684 if (contexts_computed)
4685 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4688 for (e = node->indirect_calls; e; e = e->next_callee)
4690 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4691 int count = streamer_read_uhwi (ib);
4692 bool contexts_computed = count & 1;
4693 count /= 2;
4695 if (count)
4697 vec_safe_grow_cleared (args->jump_functions, count);
4698 if (contexts_computed)
4699 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4700 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4702 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4703 data_in);
4704 if (contexts_computed)
4705 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4708 ipa_read_indirect_edge_info (ib, data_in, e);
4712 /* Write jump functions for all functions in the current partition. */
4714 void
4715 ipa_prop_write_jump_functions (void)
4717 struct cgraph_node *node;
4718 struct output_block *ob;
4719 unsigned int count = 0;
4720 lto_symtab_encoder_iterator lsei;
4721 lto_symtab_encoder_t encoder;
4724 if (!ipa_node_params_vector.exists ())
4725 return;
4727 ob = create_output_block (LTO_section_jump_functions);
4728 encoder = ob->decl_state->symtab_node_encoder;
4729 ob->symbol = NULL;
4730 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4731 lsei_next_function_in_partition (&lsei))
4733 node = lsei_cgraph_node (lsei);
4734 if (node->has_gimple_body_p ()
4735 && IPA_NODE_REF (node) != NULL)
4736 count++;
4739 streamer_write_uhwi (ob, count);
4741 /* Process all of the functions. */
4742 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4743 lsei_next_function_in_partition (&lsei))
4745 node = lsei_cgraph_node (lsei);
4746 if (node->has_gimple_body_p ()
4747 && IPA_NODE_REF (node) != NULL)
4748 ipa_write_node_info (ob, node);
4750 streamer_write_char_stream (ob->main_stream, 0);
4751 produce_asm (ob, NULL);
4752 destroy_output_block (ob);
4755 /* Read the jump functions section in file FILE_DATA of length LEN with data DATA. */
4757 static void
4758 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4759 size_t len)
4761 const struct lto_function_header *header =
4762 (const struct lto_function_header *) data;
4763 const int cfg_offset = sizeof (struct lto_function_header);
4764 const int main_offset = cfg_offset + header->cfg_size;
4765 const int string_offset = main_offset + header->main_size;
4766 struct data_in *data_in;
4767 unsigned int i;
4768 unsigned int count;
4770 lto_input_block ib_main ((const char *) data + main_offset,
4771 header->main_size);
4773 data_in =
4774 lto_data_in_create (file_data, (const char *) data + string_offset,
4775 header->string_size, vNULL);
4776 count = streamer_read_uhwi (&ib_main);
4778 for (i = 0; i < count; i++)
4780 unsigned int index;
4781 struct cgraph_node *node;
4782 lto_symtab_encoder_t encoder;
4784 index = streamer_read_uhwi (&ib_main);
4785 encoder = file_data->symtab_node_encoder;
4786 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4787 index));
4788 gcc_assert (node->definition);
4789 ipa_read_node_info (&ib_main, node, data_in);
4791 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4792 len);
4793 lto_data_in_delete (data_in);
4796 /* Read IPA-CP jump functions. */
4798 void
4799 ipa_prop_read_jump_functions (void)
4801 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4802 struct lto_file_decl_data *file_data;
4803 unsigned int j = 0;
4805 ipa_check_create_node_params ();
4806 ipa_check_create_edge_args ();
4807 ipa_register_cgraph_hooks ();
4809 while ((file_data = file_data_vec[j++]))
4811 size_t len;
4812 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4814 if (data)
4815 ipa_prop_read_section (file_data, data, len);
4819 /* After merging units, we can get mismatches in argument counts.
4820 Decl merging might also have rendered parameter lists obsolete.
4821 Also compute called_with_variable_arg info. */
4823 void
4824 ipa_update_after_lto_read (void)
4826 ipa_check_create_node_params ();
4827 ipa_check_create_edge_args ();
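/* Stream out the aggregate value replacement chain for NODE to OB. */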
4830 void
4831 write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4833 int node_ref;
4834 unsigned int count = 0;
4835 lto_symtab_encoder_t encoder;
4836 struct ipa_agg_replacement_value *aggvals, *av;
4838 aggvals = ipa_get_agg_replacements_for_node (node);
4839 encoder = ob->decl_state->symtab_node_encoder;
4840 node_ref = lto_symtab_encoder_encode (encoder, node);
4841 streamer_write_uhwi (ob, node_ref);
4843 for (av = aggvals; av; av = av->next)
4844 count++;
4845 streamer_write_uhwi (ob, count);
4847 for (av = aggvals; av; av = av->next)
4849 struct bitpack_d bp;
4851 streamer_write_uhwi (ob, av->offset);
4852 streamer_write_uhwi (ob, av->index);
4853 stream_write_tree (ob, av->value, true);
4855 bp = bitpack_create (ob->main_stream);
4856 bp_pack_value (&bp, av->by_ref, 1);
4857 streamer_write_bitpack (&bp);
4861 /* Stream in the aggregate value replacement chain for NODE from IB. */
4863 static void
4864 read_agg_replacement_chain (struct lto_input_block *ib,
4865 struct cgraph_node *node,
4866 struct data_in *data_in)
4868 struct ipa_agg_replacement_value *aggvals = NULL;
4869 unsigned int count, i;
4871 count = streamer_read_uhwi (ib);
4872 for (i = 0; i < count; i++)
4874 struct ipa_agg_replacement_value *av;
4875 struct bitpack_d bp;
4877 av = ggc_alloc<ipa_agg_replacement_value> ();
4878 av->offset = streamer_read_uhwi (ib);
4879 av->index = streamer_read_uhwi (ib);
4880 av->value = stream_read_tree (ib, data_in);
4881 bp = streamer_read_bitpack (ib);
4882 av->by_ref = bp_unpack_value (&bp, 1);
4883 av->next = aggvals;
4884 aggvals = av;
4886 ipa_set_node_agg_value_chain (node, aggvals);
4889 /* Write all aggregate replacements for nodes in the current partition. */
4891 void
4892 ipa_prop_write_all_agg_replacement (void)
4894 struct cgraph_node *node;
4895 struct output_block *ob;
4896 unsigned int count = 0;
4897 lto_symtab_encoder_iterator lsei;
4898 lto_symtab_encoder_t encoder;
4900 if (!ipa_node_agg_replacements)
4901 return;
4903 ob = create_output_block (LTO_section_ipcp_transform);
4904 encoder = ob->decl_state->symtab_node_encoder;
4905 ob->symbol = NULL;
4906 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4907 lsei_next_function_in_partition (&lsei))
4909 node = lsei_cgraph_node (lsei);
4910 if (node->has_gimple_body_p ()
4911 && ipa_get_agg_replacements_for_node (node) != NULL)
4912 count++;
4915 streamer_write_uhwi (ob, count);
4917 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4918 lsei_next_function_in_partition (&lsei))
4920 node = lsei_cgraph_node (lsei);
4921 if (node->has_gimple_body_p ()
4922 && ipa_get_agg_replacements_for_node (node) != NULL)
4923 write_agg_replacement_chain (ob, node);
4925 streamer_write_char_stream (ob->main_stream, 0);
4926 produce_asm (ob, NULL);
4927 destroy_output_block (ob);
4930 /* Read the aggregate replacements section in file FILE_DATA of length LEN
4931 with data DATA. */
4933 static void
4934 read_replacements_section (struct lto_file_decl_data *file_data,
4935 const char *data,
4936 size_t len)
4938 const struct lto_function_header *header =
4939 (const struct lto_function_header *) data;
4940 const int cfg_offset = sizeof (struct lto_function_header);
4941 const int main_offset = cfg_offset + header->cfg_size;
4942 const int string_offset = main_offset + header->main_size;
4943 struct data_in *data_in;
4944 unsigned int i;
4945 unsigned int count;
4947 lto_input_block ib_main ((const char *) data + main_offset,
4948 header->main_size);
4950 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4951 header->string_size, vNULL);
4952 count = streamer_read_uhwi (&ib_main);
4954 for (i = 0; i < count; i++)
4956 unsigned int index;
4957 struct cgraph_node *node;
4958 lto_symtab_encoder_t encoder;
4960 index = streamer_read_uhwi (&ib_main);
4961 encoder = file_data->symtab_node_encoder;
4962 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4963 index));
4964 gcc_assert (node->definition);
4965 read_agg_replacement_chain (&ib_main, node, data_in);
4967 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4968 len);
4969 lto_data_in_delete (data_in);
4972 /* Read IPA-CP aggregate replacements. */
4974 void
4975 ipa_prop_read_all_agg_replacement (void)
4977 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4978 struct lto_file_decl_data *file_data;
4979 unsigned int j = 0;
4981 while ((file_data = file_data_vec[j++]))
4983 size_t len;
4984 const char *data = lto_get_section_data (file_data,
4985 LTO_section_ipcp_transform,
4986 NULL, &len);
4987 if (data)
4988 read_replacements_section (file_data, data, len);
4992 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4993 NODE. */
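/* For example (an illustrative sketch): if NODE is a clone with
   combined_args_to_skip = {0, 2} of a function with four parameters,
   replacement indices 1 and 3 are renumbered to 0 and 1 respectively. */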
4995 static void
4996 adjust_agg_replacement_values (struct cgraph_node *node,
4997 struct ipa_agg_replacement_value *aggval)
4999 struct ipa_agg_replacement_value *v;
5000 int i, c = 0, d = 0, *adj;
5002 if (!node->clone.combined_args_to_skip)
5003 return;
5005 for (v = aggval; v; v = v->next)
5007 gcc_assert (v->index >= 0);
5008 if (c < v->index)
5009 c = v->index;
5011 c++;
5013 adj = XALLOCAVEC (int, c);
5014 for (i = 0; i < c; i++)
5015 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5017 adj[i] = -1;
5018 d++;
5020 else
5021 adj[i] = i - d;
5023 for (v = aggval; v; v = v->next)
5024 v->index = adj[v->index];
5027 /* Dominator walker driving the ipcp modification phase. */
5029 class ipcp_modif_dom_walker : public dom_walker
5031 public:
5032 ipcp_modif_dom_walker (struct func_body_info *fbi,
5033 vec<ipa_param_descriptor> descs,
5034 struct ipa_agg_replacement_value *av,
5035 bool *sc, bool *cc)
5036 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5037 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5039 virtual void before_dom_children (basic_block);
5041 private:
5042 struct func_body_info *m_fbi;
5043 vec<ipa_param_descriptor> m_descriptors;
5044 struct ipa_agg_replacement_value *m_aggval;
5045 bool *m_something_changed, *m_cfg_changed;
5048 void
5049 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5051 gimple_stmt_iterator gsi;
5052 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5054 struct ipa_agg_replacement_value *v;
5055 gimple stmt = gsi_stmt (gsi);
5056 tree rhs, val, t;
5057 HOST_WIDE_INT offset, size;
5058 int index;
5059 bool by_ref, vce;
5061 if (!gimple_assign_load_p (stmt))
5062 continue;
5063 rhs = gimple_assign_rhs1 (stmt);
5064 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5065 continue;
5067 vce = false;
5068 t = rhs;
5069 while (handled_component_p (t))
5071 /* V_C_E can do things like convert an array of integers into one
5072 bigger integer and similar things we do not handle below. */
5073 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5075 vce = true;
5076 break;
5078 t = TREE_OPERAND (t, 0);
5080 if (vce)
5081 continue;
5083 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5084 &offset, &size, &by_ref))
5085 continue;
5086 for (v = m_aggval; v; v = v->next)
5087 if (v->index == index
5088 && v->offset == offset)
5089 break;
5090 if (!v
5091 || v->by_ref != by_ref
5092 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5093 continue;
5095 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5096 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5098 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5099 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5100 else if (TYPE_SIZE (TREE_TYPE (rhs))
5101 == TYPE_SIZE (TREE_TYPE (v->value)))
5102 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5103 else
5105 if (dump_file)
5107 fprintf (dump_file, " const ");
5108 print_generic_expr (dump_file, v->value, 0);
5109 fprintf (dump_file, " can't be converted to type of ");
5110 print_generic_expr (dump_file, rhs, 0);
5111 fprintf (dump_file, "\n");
5113 continue;
5116 else
5117 val = v->value;
5119 if (dump_file && (dump_flags & TDF_DETAILS))
5121 fprintf (dump_file, "Modifying stmt:\n ");
5122 print_gimple_stmt (dump_file, stmt, 0, 0);
5124 gimple_assign_set_rhs_from_tree (&gsi, val);
5125 update_stmt (stmt);
5127 if (dump_file && (dump_flags & TDF_DETAILS))
5129 fprintf (dump_file, "into:\n ");
5130 print_gimple_stmt (dump_file, stmt, 0, 0);
5131 fprintf (dump_file, "\n");
5134 *m_something_changed = true;
5135 if (maybe_clean_eh_stmt (stmt)
5136 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5137 *m_cfg_changed = true;
5142 /* IPCP transformation phase doing propagation of aggregate values. */
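/* A minimal sketch of the effect (hypothetical values): if IPA-CP
   recorded that, in this clone, the aggregate reached through parameter
   0 always holds 4 at offset 32, the dominator walk defined above
   rewrites a matching load "x_1 = p_2(D)->f" into "x_1 = 4". */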
5144 unsigned int
5145 ipcp_transform_function (struct cgraph_node *node)
5147 vec<ipa_param_descriptor> descriptors = vNULL;
5148 struct func_body_info fbi;
5149 struct ipa_agg_replacement_value *aggval;
5150 int param_count;
5151 bool cfg_changed = false, something_changed = false;
5153 gcc_checking_assert (cfun);
5154 gcc_checking_assert (current_function_decl);
5156 if (dump_file)
5157 fprintf (dump_file, "Modification phase of node %s/%i\n",
5158 node->name (), node->order);
5160 aggval = ipa_get_agg_replacements_for_node (node);
5161 if (!aggval)
5162 return 0;
5163 param_count = count_formal_params (node->decl);
5164 if (param_count == 0)
5165 return 0;
5166 adjust_agg_replacement_values (node, aggval);
5167 if (dump_file)
5168 ipa_dump_agg_replacement_values (dump_file, aggval);
5170 fbi.node = node;
5171 fbi.info = NULL;
5172 fbi.bb_infos = vNULL;
5173 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5174 fbi.param_count = param_count;
5175 fbi.aa_walked = 0;
5177 descriptors.safe_grow_cleared (param_count);
5178 ipa_populate_param_decls (node, descriptors);
5179 calculate_dominance_info (CDI_DOMINATORS);
5180 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5181 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5183 int i;
5184 struct ipa_bb_info *bi;
5185 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5186 free_ipa_bb_info (bi);
5187 fbi.bb_infos.release ();
5188 free_dominance_info (CDI_DOMINATORS);
5189 (*ipa_node_agg_replacements)[node->uid] = NULL;
5190 descriptors.release ();
5192 if (!something_changed)
5193 return 0;
5194 else if (cfg_changed)
5195 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5196 else
5197 return TODO_update_ssa_only_virtuals;