[official-gcc.git] / gcc / ipa-prop.c

/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};

/* Information related to a given BB that is used only when looking at function
   body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at function
   body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements we have already walked when analyzing this
     function.  */
  unsigned int aa_walked;
};

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the argument information for each call graph edge is actually
   stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
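
/* Illustrative note (not from the original sources): fs_opts above is
   typically set by __attribute__ ((optimize (...))) on the function, so a
   function carrying, say, optimize ("O0") is skipped by IPA-CP because the
   check above sees a zero optimize level for it.  */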

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a know-nothing jump function.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type of the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
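
/* As an illustrative sketch (not part of the original sources), the layout
   described above corresponds to C++ code such as:

     struct A { A (); virtual void f (); };
     struct B : A { B (); virtual void f (); };

     B::B () : A ()     // 1) ancestor constructor runs first
     {                  // 2) then the VMT pointer is set to B's vtable
       do_stuff ();     // 3) only then user code runs and may call virtuals
     }

   so a backward walk starting from a statement in section 3 may ignore calls
   until it reaches the VMT stores of section 2.  */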

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   in between the beginning of the function and the point CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they may call destructors.  We assume that methods cannot destroy the
   THIS pointer.  Also, as a special case, constructors and destructors may
   change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
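
/* Illustrative example (not from the original sources): in

     struct A { A () { f (); } virtual void f (); };
     struct B : A { virtual void f (); };

   the dynamic type of *this as seen inside A::A is still A even when a B is
   being constructed, which is why the THIS pointer of constructors and
   destructors is treated as type-changing above.  */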

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it may have, return true and fill in the jump function JFUNC with
   relevant type information or set it to unknown.  ARG is the object itself
   (not a pointer to it, unless dereferenced).  BASE is the base of the memory
   access as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}
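
/* The walk budget above corresponds to the --param ipa-max-aa-steps knob;
   every walk_aliased_vdefs call done on behalf of FBI adds the number of
   visited statements to aa_walked (see the callers below).  */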

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the func_body_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);
      }

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);
      }

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);
      }

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

      B::foo() (struct B * const this)
      {
        struct A * D.1845;

        D.1845_2 = &this_1(D)->D.1748;
        A::bar (D.1845_2);
      }

   INFO is the structure describing individual parameters and is used across
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
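
/* For illustration (a sketch, not from the original sources): the Itanium
   C++ ABI represents a pointer to member function roughly as

     struct member_ptr
     {
       void (*__pfn) ();   -- method pointer, or vtable offset plus one
       ptrdiff_t __delta;  -- adjustment to apply to the this pointer
     };

   which is the two-field shape the predicate above matches: a pointer to a
   METHOD_TYPE followed by an integral delta.  */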

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such an
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */
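
/* For example (an illustrative sketch, not from the original sources), given
   a caller like

     struct S { int a, b; };
     void bar (struct S *p);
     void foo (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       bar (&s);
     }

   the backward walk from the call collects the constants 1 and 2 together
   with their offsets within S into JFUNC.  */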

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
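
/* Return the type of the Ith formal parameter of the callee of call graph
   edge E: first from the function type's argument list (taken from the callee
   declaration, or from the call statement's fntype when there is no callee),
   then, failing that, from the callee's DECL_ARGUMENTS.  Return NULL if the
   type cannot be determined.  */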
static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 aggregate passed (because type conversions are ignored in gimple).
	 Usually we can safely get the type from the function declaration, but
	 in case of K&R prototypes or variadic functions we can try our luck
	 with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we
	 may better work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}
1861 /* If STMT looks like a statement loading a value from a member pointer formal
1862 parameter, return that parameter and store the offset of the field to
1863 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1864 might be clobbered). If USE_DELTA, then we look for a use of the delta
1865 field rather than the pfn. */
1867 static tree
1868 ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1869 HOST_WIDE_INT *offset_p)
1871 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1873 if (!gimple_assign_single_p (stmt))
1874 return NULL_TREE;
1876 rhs = gimple_assign_rhs1 (stmt);
1877 if (TREE_CODE (rhs) == COMPONENT_REF)
1879 ref_field = TREE_OPERAND (rhs, 1);
1880 rhs = TREE_OPERAND (rhs, 0);
1882 else
1883 ref_field = NULL_TREE;
1884 if (TREE_CODE (rhs) != MEM_REF)
1885 return NULL_TREE;
1886 rec = TREE_OPERAND (rhs, 0);
1887 if (TREE_CODE (rec) != ADDR_EXPR)
1888 return NULL_TREE;
1889 rec = TREE_OPERAND (rec, 0);
1890 if (TREE_CODE (rec) != PARM_DECL
1891 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
1892 return NULL_TREE;
1893 ref_offset = TREE_OPERAND (rhs, 1);
1895 if (use_delta)
1896 fld = delta_field;
1897 else
1898 fld = ptr_field;
1899 if (offset_p)
1900 *offset_p = int_bit_position (fld);
1902 if (ref_field)
1904 if (integer_nonzerop (ref_offset))
1905 return NULL_TREE;
1906 return ref_field == fld ? rec : NULL_TREE;
1908 else
1909 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1910 : NULL_TREE;
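/* Illustratively (assumed example, not from the sources): for a PARM_DECL F
   whose type is a pointer-to-member-function record, a load such as
   "f$__pfn = f.__pfn" (or the equivalent "f$__pfn = MEM[(struct *)&f + 4B]")
   matches with USE_DELTA == false; F is returned and *OFFSET_P is set to the
   bit position of the __pfn field.  */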
1913 /* Returns true iff T is an SSA_NAME defined by a statement. */
1915 static bool
1916 ipa_is_ssa_with_stmt_def (tree t)
1918 if (TREE_CODE (t) == SSA_NAME
1919 && !SSA_NAME_IS_DEFAULT_DEF (t))
1920 return true;
1921 else
1922 return false;
1925 /* Find the indirect call graph edge corresponding to STMT and mark it as a
1926 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1927 indirect call graph edge. */
1929 static struct cgraph_edge *
1930 ipa_note_param_call (struct cgraph_node *node, int param_index,
1931 gcall *stmt)
1933 struct cgraph_edge *cs;
1935 cs = node->get_edge (stmt);
1936 cs->indirect_info->param_index = param_index;
1937 cs->indirect_info->agg_contents = 0;
1938 cs->indirect_info->member_ptr = 0;
1939 return cs;
1942 /* Analyze the CALL and examine uses of formal parameters of the caller
1943 FBI->node (described by FBI->info), which also holds intermediate
1944 information about each formal parameter. Currently the function checks
1945 whether the call calls a pointer that is a formal parameter and if so, the
1946 parameter is marked with the called flag and an indirect call graph edge
1947 describing the call is created. This is very simple for ordinary pointers
1948 represented in SSA but not-so-nice when it comes to member pointers. The
1949 ugly part of this function does nothing more than trying to match the
1950 pattern of such a call. An example of such a pattern is the gimple dump
1951 below, the call is on the last line:
1953 <bb 2>:
1954 f$__delta_5 = f.__delta;
1955 f$__pfn_24 = f.__pfn;
1957 or
1958 <bb 2>:
1959 f$__delta_5 = MEM[(struct *)&f];
1960 f$__pfn_24 = MEM[(struct *)&f + 4B];
1962 and a few lines below:
1964 <bb 5>
1965 D.2496_3 = (int) f$__pfn_24;
1966 D.2497_4 = D.2496_3 & 1;
1967 if (D.2497_4 != 0)
1968 goto <bb 3>;
1969 else
1970 goto <bb 4>;
1972 <bb 6>:
1973 D.2500_7 = (unsigned int) f$__delta_5;
1974 D.2501_8 = &S + D.2500_7;
1975 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1976 D.2503_10 = *D.2502_9;
1977 D.2504_12 = f$__pfn_24 + -1;
1978 D.2505_13 = (unsigned int) D.2504_12;
1979 D.2506_14 = D.2503_10 + D.2505_13;
1980 D.2507_15 = *D.2506_14;
1981 iftmp.11_16 = (String:: *) D.2507_15;
1983 <bb 7>:
1984 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1985 D.2500_19 = (unsigned int) f$__delta_5;
1986 D.2508_20 = &S + D.2500_19;
1987 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1989 Such patterns are results of simple calls to a member pointer:
1991 int doprinting (int (MyString::* f)(int) const)
1992 {
1993 MyString S ("somestring");
1995 return (S.*f)(4);
1996 }
1998 Moreover, the function also looks for called pointers loaded from aggregates
1999 passed by value or reference. */
2001 static void
2002 ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
2003 tree target)
2005 struct ipa_node_params *info = fbi->info;
2006 HOST_WIDE_INT offset;
2007 bool by_ref;
2009 if (SSA_NAME_IS_DEFAULT_DEF (target))
2011 tree var = SSA_NAME_VAR (target);
2012 int index = ipa_get_param_decl_index (info, var);
2013 if (index >= 0)
2014 ipa_note_param_call (fbi->node, index, call);
2015 return;
2018 int index;
2019 gimple def = SSA_NAME_DEF_STMT (target);
2020 if (gimple_assign_single_p (def)
2021 && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
2022 gimple_assign_rhs1 (def), &index, &offset,
2023 NULL, &by_ref))
2025 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2026 cs->indirect_info->offset = offset;
2027 cs->indirect_info->agg_contents = 1;
2028 cs->indirect_info->by_ref = by_ref;
2029 return;
2032 /* Now we need to try to match the complex pattern of calling a member
2033 pointer. */
2034 if (gimple_code (def) != GIMPLE_PHI
2035 || gimple_phi_num_args (def) != 2
2036 || !POINTER_TYPE_P (TREE_TYPE (target))
2037 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2038 return;
2040 /* First, we need to check whether one of these is a load from a member
2041 pointer that is a parameter to this function. */
2042 tree n1 = PHI_ARG_DEF (def, 0);
2043 tree n2 = PHI_ARG_DEF (def, 1);
2044 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2045 return;
2046 gimple d1 = SSA_NAME_DEF_STMT (n1);
2047 gimple d2 = SSA_NAME_DEF_STMT (n2);
2049 tree rec;
2050 basic_block bb, virt_bb;
2051 basic_block join = gimple_bb (def);
2052 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2054 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2055 return;
2057 bb = EDGE_PRED (join, 0)->src;
2058 virt_bb = gimple_bb (d2);
2060 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2062 bb = EDGE_PRED (join, 1)->src;
2063 virt_bb = gimple_bb (d1);
2065 else
2066 return;
2068 /* Second, we need to check that the basic blocks are laid out in the way
2069 corresponding to the pattern. */
2071 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2072 || single_pred (virt_bb) != bb
2073 || single_succ (virt_bb) != join)
2074 return;
2076 /* Third, let's see that the branching is done depending on the least
2077 significant bit of the pfn. */
2079 gimple branch = last_stmt (bb);
2080 if (!branch || gimple_code (branch) != GIMPLE_COND)
2081 return;
2083 if ((gimple_cond_code (branch) != NE_EXPR
2084 && gimple_cond_code (branch) != EQ_EXPR)
2085 || !integer_zerop (gimple_cond_rhs (branch)))
2086 return;
2088 tree cond = gimple_cond_lhs (branch);
2089 if (!ipa_is_ssa_with_stmt_def (cond))
2090 return;
2092 def = SSA_NAME_DEF_STMT (cond);
2093 if (!is_gimple_assign (def)
2094 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2095 || !integer_onep (gimple_assign_rhs2 (def)))
2096 return;
2098 cond = gimple_assign_rhs1 (def);
2099 if (!ipa_is_ssa_with_stmt_def (cond))
2100 return;
2102 def = SSA_NAME_DEF_STMT (cond);
2104 if (is_gimple_assign (def)
2105 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2107 cond = gimple_assign_rhs1 (def);
2108 if (!ipa_is_ssa_with_stmt_def (cond))
2109 return;
2110 def = SSA_NAME_DEF_STMT (cond);
2113 tree rec2;
2114 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2115 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2116 == ptrmemfunc_vbit_in_delta),
2117 NULL);
2118 if (rec != rec2)
2119 return;
2121 index = ipa_get_param_decl_index (info, rec);
2122 if (index >= 0
2123 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2125 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2126 cs->indirect_info->offset = offset;
2127 cs->indirect_info->agg_contents = 1;
2128 cs->indirect_info->member_ptr = 1;
2131 return;
2134 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2135 object referenced in the expression is a formal parameter of the caller
2136 FBI->node (described by FBI->info), create a call note for the
2137 statement. */
2139 static void
2140 ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
2141 gcall *call, tree target)
2143 tree obj = OBJ_TYPE_REF_OBJECT (target);
2144 int index;
2145 HOST_WIDE_INT anc_offset;
2147 if (!flag_devirtualize)
2148 return;
2150 if (TREE_CODE (obj) != SSA_NAME)
2151 return;
2153 struct ipa_node_params *info = fbi->info;
2154 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2156 struct ipa_jump_func jfunc;
2157 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2158 return;
2160 anc_offset = 0;
2161 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2162 gcc_assert (index >= 0);
2163 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2164 call, &jfunc))
2165 return;
2167 else
2169 struct ipa_jump_func jfunc;
2170 gimple stmt = SSA_NAME_DEF_STMT (obj);
2171 tree expr;
2173 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2174 if (!expr)
2175 return;
2176 index = ipa_get_param_decl_index (info,
2177 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2178 gcc_assert (index >= 0);
2179 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2180 call, &jfunc, anc_offset))
2181 return;
2184 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2185 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2186 ii->offset = anc_offset;
2187 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2188 ii->otr_type = obj_type_ref_class (target);
2189 ii->polymorphic = 1;
2192 /* Analyze whether and how the call statement CALL utilizes formal
2193 parameters of the caller (described by FBI->info). FBI also holds
2194 intermediate information about each formal parameter. */
2196 static void
2197 ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
2199 tree target = gimple_call_fn (call);
2201 if (!target
2202 || (TREE_CODE (target) != SSA_NAME
2203 && !virtual_method_call_p (target)))
2204 return;
2206 struct cgraph_edge *cs = fbi->node->get_edge (call);
2207 /* If we previously turned the call into a direct call, there is
2208 no need to analyze. */
2209 if (cs && !cs->indirect_unknown_callee)
2210 return;
2212 if (cs->indirect_info->polymorphic && flag_devirtualize)
2214 tree instance;
2215 tree target = gimple_call_fn (call);
2216 ipa_polymorphic_call_context context (current_function_decl,
2217 target, call, &instance);
2219 gcc_checking_assert (cs->indirect_info->otr_type
2220 == obj_type_ref_class (target));
2221 gcc_checking_assert (cs->indirect_info->otr_token
2222 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2224 cs->indirect_info->vptr_changed
2225 = !context.get_dynamic_type (instance,
2226 OBJ_TYPE_REF_OBJECT (target),
2227 obj_type_ref_class (target), call);
2228 cs->indirect_info->context = context;
2231 if (TREE_CODE (target) == SSA_NAME)
2232 ipa_analyze_indirect_call_uses (fbi, call, target);
2233 else if (virtual_method_call_p (target))
2234 ipa_analyze_virtual_call_uses (fbi, call, target);
2238 /* Analyze the call statement STMT with respect to formal parameters (described
2239 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2240 formal parameters are called. */
2242 static void
2243 ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
2245 if (is_gimple_call (stmt))
2246 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2249 /* Callback of walk_stmt_load_store_addr_ops, used as the visit_load,
2250 visit_store and visit_addr hook. If OP is a parameter declaration, mark it
2251 as used in the info structure passed in DATA. */
2253 static bool
2254 visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
2256 struct ipa_node_params *info = (struct ipa_node_params *) data;
2258 op = get_base_address (op);
2259 if (op
2260 && TREE_CODE (op) == PARM_DECL)
2262 int index = ipa_get_param_decl_index (info, op);
2263 gcc_assert (index >= 0);
2264 ipa_set_param_used (info, index, true);
2267 return false;
2270 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2271 the findings in various structures of the associated ipa_node_params
2272 structure, such as parameter flags, notes etc. FBI holds various data about
2273 the function being analyzed. */
2275 static void
2276 ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
2278 gimple_stmt_iterator gsi;
2279 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2281 gimple stmt = gsi_stmt (gsi);
2283 if (is_gimple_debug (stmt))
2284 continue;
2286 ipa_analyze_stmt_uses (fbi, stmt);
2287 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2288 visit_ref_for_mod_analysis,
2289 visit_ref_for_mod_analysis,
2290 visit_ref_for_mod_analysis);
2292 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2293 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2294 visit_ref_for_mod_analysis,
2295 visit_ref_for_mod_analysis,
2296 visit_ref_for_mod_analysis);
2299 /* Calculate controlled uses of parameters of NODE. */
2301 static void
2302 ipa_analyze_controlled_uses (struct cgraph_node *node)
2304 struct ipa_node_params *info = IPA_NODE_REF (node);
2306 for (int i = 0; i < ipa_get_param_count (info); i++)
2308 tree parm = ipa_get_param (info, i);
2309 int controlled_uses = 0;
2311 /* For SSA regs see if the parameter is used. For non-SSA parameters we
2312 compute the flag during modification analysis. */
2313 if (is_gimple_reg (parm))
2315 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2316 parm);
2317 if (ddef && !has_zero_uses (ddef))
2319 imm_use_iterator imm_iter;
2320 use_operand_p use_p;
2322 ipa_set_param_used (info, i, true);
2323 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2324 if (!is_gimple_call (USE_STMT (use_p)))
2326 if (!is_gimple_debug (USE_STMT (use_p)))
2328 controlled_uses = IPA_UNDESCRIBED_USE;
2329 break;
2332 else
2333 controlled_uses++;
2335 else
2336 controlled_uses = 0;
2338 else
2339 controlled_uses = IPA_UNDESCRIBED_USE;
2340 ipa_set_controlled_uses (info, i, controlled_uses);
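/* A hedged example (assumed source, not from the compiler itself): given

     void f (void (*cb) (void)) { cb (); cb (); }

   the default definition of CB has exactly two uses, both call statements,
   so CB ends up with controlled_uses == 2.  A single non-call, non-debug
   use would instead force IPA_UNDESCRIBED_USE.  */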
2344 /* Free stuff in BI. */
2346 static void
2347 free_ipa_bb_info (struct ipa_bb_info *bi)
2349 bi->cg_edges.release ();
2350 bi->param_aa_statuses.release ();
2353 /* Dominator walker driving the analysis. */
2355 class analysis_dom_walker : public dom_walker
2357 public:
2358 analysis_dom_walker (struct func_body_info *fbi)
2359 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2361 virtual void before_dom_children (basic_block);
2363 private:
2364 struct func_body_info *m_fbi;
2367 void
2368 analysis_dom_walker::before_dom_children (basic_block bb)
2370 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2371 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2374 /* Initialize the array describing properties of formal parameters
2375 of NODE, analyze their uses and compute jump functions associated
2376 with actual arguments of calls from within NODE. */
2378 void
2379 ipa_analyze_node (struct cgraph_node *node)
2381 struct func_body_info fbi;
2382 struct ipa_node_params *info;
2384 ipa_check_create_node_params ();
2385 ipa_check_create_edge_args ();
2386 info = IPA_NODE_REF (node);
2388 if (info->analysis_done)
2389 return;
2390 info->analysis_done = 1;
2392 if (ipa_func_spec_opts_forbid_analysis_p (node))
2394 for (int i = 0; i < ipa_get_param_count (info); i++)
2396 ipa_set_param_used (info, i, true);
2397 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2399 return;
2402 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2403 push_cfun (func);
2404 calculate_dominance_info (CDI_DOMINATORS);
2405 ipa_initialize_node_params (node);
2406 ipa_analyze_controlled_uses (node);
2408 fbi.node = node;
2409 fbi.info = IPA_NODE_REF (node);
2410 fbi.bb_infos = vNULL;
2411 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2412 fbi.param_count = ipa_get_param_count (info);
2413 fbi.aa_walked = 0;
2415 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2417 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2418 bi->cg_edges.safe_push (cs);
2421 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2423 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2424 bi->cg_edges.safe_push (cs);
2427 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2429 int i;
2430 struct ipa_bb_info *bi;
2431 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
2432 free_ipa_bb_info (bi);
2433 fbi.bb_infos.release ();
2434 free_dominance_info (CDI_DOMINATORS);
2435 pop_cfun ();
2438 /* Update the jump functions associated with call graph edge E when the call
2439 graph edge CS is being inlined, assuming that E->caller is already (possibly
2440 indirectly) inlined into CS->callee and that E has not been inlined. */
2442 static void
2443 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2444 struct cgraph_edge *e)
2446 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2447 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2448 int count = ipa_get_cs_argument_count (args);
2449 int i;
2451 for (i = 0; i < count; i++)
2453 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2454 struct ipa_polymorphic_call_context *dst_ctx
2455 = ipa_get_ith_polymorhic_call_context (args, i);
2457 if (dst->type == IPA_JF_ANCESTOR)
2459 struct ipa_jump_func *src;
2460 int dst_fid = dst->value.ancestor.formal_id;
2461 struct ipa_polymorphic_call_context *src_ctx
2462 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2464 /* Variable number of arguments can cause havoc if we try to access
2465 one that does not exist in the inlined edge. So make sure we
2466 don't. */
2467 if (dst_fid >= ipa_get_cs_argument_count (top))
2469 ipa_set_jf_unknown (dst);
2470 continue;
2473 src = ipa_get_ith_jump_func (top, dst_fid);
2475 if (src_ctx && !src_ctx->useless_p ())
2477 struct ipa_polymorphic_call_context ctx = *src_ctx;
2479 /* TODO: Make type preserved safe WRT contexts. */
2480 if (!ipa_get_jf_ancestor_type_preserved (dst))
2481 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2482 ctx.offset_by (dst->value.ancestor.offset);
2483 if (!ctx.useless_p ())
2485 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2486 count);
2487 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2489 dst_ctx->combine_with (ctx);
2492 if (src->agg.items
2493 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2495 struct ipa_agg_jf_item *item;
2496 int j;
2498 /* Currently we do not produce clobber aggregate jump functions,
2499 replace with merging when we do. */
2500 gcc_assert (!dst->agg.items);
2502 dst->agg.items = vec_safe_copy (src->agg.items);
2503 dst->agg.by_ref = src->agg.by_ref;
2504 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2505 item->offset -= dst->value.ancestor.offset;
2508 if (src->type == IPA_JF_PASS_THROUGH
2509 && src->value.pass_through.operation == NOP_EXPR)
2511 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2512 dst->value.ancestor.agg_preserved &=
2513 src->value.pass_through.agg_preserved;
2515 else if (src->type == IPA_JF_ANCESTOR)
2517 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2518 dst->value.ancestor.offset += src->value.ancestor.offset;
2519 dst->value.ancestor.agg_preserved &=
2520 src->value.ancestor.agg_preserved;
2522 else
2523 ipa_set_jf_unknown (dst);
2525 else if (dst->type == IPA_JF_PASS_THROUGH)
2527 struct ipa_jump_func *src;
2528 /* We must check range due to calls with variable number of arguments
2529 and we cannot combine jump functions with operations. */
2530 if (dst->value.pass_through.operation == NOP_EXPR
2531 && (dst->value.pass_through.formal_id
2532 < ipa_get_cs_argument_count (top)))
2534 int dst_fid = dst->value.pass_through.formal_id;
2535 src = ipa_get_ith_jump_func (top, dst_fid);
2536 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2537 struct ipa_polymorphic_call_context *src_ctx
2538 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2540 if (src_ctx && !src_ctx->useless_p ())
2542 struct ipa_polymorphic_call_context ctx = *src_ctx;
2544 /* TODO: Make type preserved safe WRT contexts. */
2545 if (!ipa_get_jf_pass_through_type_preserved (dst))
2546 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2547 if (!ctx.useless_p ())
2549 if (!dst_ctx)
2551 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2552 count);
2553 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2555 dst_ctx->combine_with (ctx);
2558 switch (src->type)
2560 case IPA_JF_UNKNOWN:
2561 ipa_set_jf_unknown (dst);
2562 break;
2563 case IPA_JF_CONST:
2564 ipa_set_jf_cst_copy (dst, src);
2565 break;
2567 case IPA_JF_PASS_THROUGH:
2569 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2570 enum tree_code operation;
2571 operation = ipa_get_jf_pass_through_operation (src);
2573 if (operation == NOP_EXPR)
2575 bool agg_p;
2576 agg_p = dst_agg_p
2577 && ipa_get_jf_pass_through_agg_preserved (src);
2578 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2580 else
2582 tree operand = ipa_get_jf_pass_through_operand (src);
2583 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2584 operation);
2586 break;
2588 case IPA_JF_ANCESTOR:
2590 bool agg_p;
2591 agg_p = dst_agg_p
2592 && ipa_get_jf_ancestor_agg_preserved (src);
2593 ipa_set_ancestor_jf (dst,
2594 ipa_get_jf_ancestor_offset (src),
2595 ipa_get_jf_ancestor_formal_id (src),
2596 agg_p);
2597 break;
2599 default:
2600 gcc_unreachable ();
2603 if (src->agg.items
2604 && (dst_agg_p || !src->agg.by_ref))
2606 /* Currently we do not produce clobber aggregate jump
2607 functions, replace with merging when we do. */
2608 gcc_assert (!dst->agg.items);
2610 dst->agg.by_ref = src->agg.by_ref;
2611 dst->agg.items = vec_safe_copy (src->agg.items);
2614 else
2615 ipa_set_jf_unknown (dst);
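/* An illustrative composition (assumed scenario): if E's jump function for
   some argument is a simple pass-through of CS->callee's formal #1, and CS
   itself passes its argument #1 as a simple pass-through of the caller's
   formal #0, then after inlining CS the jump function of E becomes a simple
   pass-through of formal #0 of the new root.  */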
2620 /* If TARGET is an addr_expr of a function declaration, make it the
2621 (speculative) destination of an indirect edge IE and return the edge.
2622 Otherwise, return NULL. */
2624 struct cgraph_edge *
2625 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2626 bool speculative)
2628 struct cgraph_node *callee;
2629 struct inline_edge_summary *es = inline_edge_summary (ie);
2630 bool unreachable = false;
2632 if (TREE_CODE (target) == ADDR_EXPR)
2633 target = TREE_OPERAND (target, 0);
2634 if (TREE_CODE (target) != FUNCTION_DECL)
2636 target = canonicalize_constructor_val (target, NULL);
2637 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2639 if (ie->indirect_info->member_ptr)
2640 /* Member pointer call that goes through a VMT lookup. */
2641 return NULL;
2643 if (dump_enabled_p ())
2645 location_t loc = gimple_location_safe (ie->call_stmt);
2646 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2647 "discovered direct call to non-function in %s/%i, "
2648 "making it __builtin_unreachable\n",
2649 ie->caller->name (), ie->caller->order);
2652 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2653 callee = cgraph_node::get_create (target);
2654 unreachable = true;
2656 else
2657 callee = cgraph_node::get (target);
2659 else
2660 callee = cgraph_node::get (target);
2662 /* Because may-edges are not explicitly represented and the vtable may be
2663 external, we may create the first reference to the object in the unit. */
2664 if (!callee || callee->global.inlined_to)
2667 /* We had better make sure we can refer to it.
2668 In the case of static functions we are out of luck, since we have already
2669 removed their bodies. In the case of public functions we may or may
2670 not introduce the reference. */
2671 if (!canonicalize_constructor_val (target, NULL)
2672 || !TREE_PUBLIC (target))
2674 if (dump_file)
2675 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2676 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
2677 xstrdup_for_dump (ie->caller->name ()),
2678 ie->caller->order,
2679 xstrdup_for_dump (ie->callee->name ()),
2680 ie->callee->order);
2681 return NULL;
2683 callee = cgraph_node::get_create (target);
2686 /* If the edge is already speculated, just check whether the target agrees and give up. */
2687 if (speculative && ie->speculative)
2689 struct cgraph_edge *e2;
2690 struct ipa_ref *ref;
2691 ie->speculative_call_info (e2, ie, ref);
2692 if (e2->callee->ultimate_alias_target ()
2693 != callee->ultimate_alias_target ())
2695 if (dump_file)
2696 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2697 "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
2698 xstrdup_for_dump (ie->caller->name ()),
2699 ie->caller->order,
2700 xstrdup_for_dump (callee->name ()),
2701 callee->order,
2702 xstrdup_for_dump (e2->callee->name ()),
2703 e2->callee->order);
2705 else
2707 if (dump_file)
2708 fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2709 "(%s/%i -> %s/%i) this agree with previous speculation.\n",
2710 xstrdup_for_dump (ie->caller->name ()),
2711 ie->caller->order,
2712 xstrdup_for_dump (callee->name ()),
2713 callee->order);
2715 return NULL;
2718 if (!dbg_cnt (devirt))
2719 return NULL;
2721 ipa_check_create_node_params ();
2723 /* We cannot make edges to inline clones. It is a bug if someone removed
2724 the cgraph node too early. */
2725 gcc_assert (!callee->global.inlined_to);
2727 if (dump_file && !unreachable)
2729 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2730 "(%s/%i -> %s/%i), for stmt ",
2731 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2732 speculative ? "speculative" : "known",
2733 xstrdup_for_dump (ie->caller->name ()),
2734 ie->caller->order,
2735 xstrdup_for_dump (callee->name ()),
2736 callee->order);
2737 if (ie->call_stmt)
2738 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2739 else
2740 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2742 if (dump_enabled_p ())
2744 location_t loc = gimple_location_safe (ie->call_stmt);
2746 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2747 "converting indirect call in %s to direct call to %s\n",
2748 ie->caller->name (), callee->name ());
2750 if (!speculative)
2752 struct cgraph_edge *orig = ie;
2753 ie = ie->make_direct (callee);
2754 /* If we resolved speculative edge the cost is already up to date
2755 for direct call (adjusted by inline_edge_duplication_hook). */
2756 if (ie == orig)
2758 es = inline_edge_summary (ie);
2759 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2760 - eni_size_weights.call_cost);
2761 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2762 - eni_time_weights.call_cost);
2765 else
2767 if (!callee->can_be_discarded_p ())
2769 cgraph_node *alias;
2770 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2771 if (alias)
2772 callee = alias;
2774 /* make_speculative will update ie's cost to direct call cost. */
2775 ie = ie->make_speculative
2776 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
2779 return ie;
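/* A minimal usage sketch (hypothetical caller; IE and FOO_DECL are assumed
   to be an indirect edge and a discovered FUNCTION_DECL, respectively).  */
#if 0
  struct cgraph_edge *direct
    = ipa_make_edge_direct_to_target (ie, build_fold_addr_expr (foo_decl),
				      false);
  if (direct)
    ;  /* IE has been turned into (or replaced by) a direct edge.  */
#endif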
2782 /* Retrieve value from aggregate jump function AGG for the given OFFSET or
2783 return NULL if there is none. BY_REF specifies whether the value has to
2784 be passed by reference or by value. */
2786 tree
2787 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2788 HOST_WIDE_INT offset, bool by_ref)
2790 struct ipa_agg_jf_item *item;
2791 int i;
2793 if (by_ref != agg->by_ref)
2794 return NULL;
2796 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2797 if (item->offset == offset)
2799 /* Currently we do not have clobber values, return NULL for them once
2800 we do. */
2801 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2802 return item->value;
2804 return NULL;
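/* A hedged usage sketch (assumed context; JFUNC is hypothetical).  Item
   offsets are measured in bits here, so looking up the value stored at
   byte 8 of an aggregate passed by reference could look like:  */
#if 0
  tree cst = ipa_find_agg_cst_for_param (&jfunc->agg, 8 * BITS_PER_UNIT,
					 true);
  if (cst)
    ;  /* CST is a gimple IP invariant, e.g. an INTEGER_CST or ADDR_EXPR.  */
#endif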
2807 /* Remove a reference to SYMBOL from the list of references of a node given by
2808 reference description RDESC. Return true if the reference has been
2809 successfully found and removed. */
2811 static bool
2812 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
2814 struct ipa_ref *to_del;
2815 struct cgraph_edge *origin;
2817 origin = rdesc->cs;
2818 if (!origin)
2819 return false;
2820 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
2821 origin->lto_stmt_uid);
2822 if (!to_del)
2823 return false;
2825 to_del->remove_reference ();
2826 if (dump_file)
2827 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2828 xstrdup_for_dump (origin->caller->name ()),
2829 origin->caller->order, xstrdup_for_dump (symbol->name ()));
2830 return true;
2833 /* If JFUNC has a reference description with refcount different from
2834 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2835 NULL. JFUNC must be a constant jump function. */
2837 static struct ipa_cst_ref_desc *
2838 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2840 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2841 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2842 return rdesc;
2843 else
2844 return NULL;
2847 /* If the value of constant jump function JFUNC is an address of a function
2848 declaration, return the associated call graph node. Otherwise return
2849 NULL. */
2851 static cgraph_node *
2852 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2854 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2855 tree cst = ipa_get_jf_constant (jfunc);
2856 if (TREE_CODE (cst) != ADDR_EXPR
2857 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2858 return NULL;
2860 return cgraph_node::get (TREE_OPERAND (cst, 0));
2864 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
2865 refcount and if it hits zero, remove the reference to the associated symbol
2866 from the caller of the edge specified in the rdesc. Return false if either
2867 the symbol or the reference could not be found, otherwise return true. */
2869 static bool
2870 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2872 struct ipa_cst_ref_desc *rdesc;
2873 if (jfunc->type == IPA_JF_CONST
2874 && (rdesc = jfunc_rdesc_usable (jfunc))
2875 && --rdesc->refcount == 0)
2877 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
2878 if (!symbol)
2879 return false;
2881 return remove_described_reference (symbol, rdesc);
2883 return true;
2886 /* Try to find a destination for indirect edge IE that corresponds to a simple
2887 call or a call of a member function pointer and where the destination is a
2888 pointer formal parameter described by jump function JFUNC. If it can be
2889 determined, return the newly direct edge, otherwise return NULL.
2890 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
2892 static struct cgraph_edge *
2893 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
2894 struct ipa_jump_func *jfunc,
2895 struct ipa_node_params *new_root_info)
2897 struct cgraph_edge *cs;
2898 tree target;
2899 bool agg_contents = ie->indirect_info->agg_contents;
2901 if (ie->indirect_info->agg_contents)
2902 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2903 ie->indirect_info->offset,
2904 ie->indirect_info->by_ref);
2905 else
2906 target = ipa_value_from_jfunc (new_root_info, jfunc);
2907 if (!target)
2908 return NULL;
2909 cs = ipa_make_edge_direct_to_target (ie, target);
2911 if (cs && !agg_contents)
2913 bool ok;
2914 gcc_checking_assert (cs->callee
2915 && (cs != ie
2916 || jfunc->type != IPA_JF_CONST
2917 || !cgraph_node_for_jfunc (jfunc)
2918 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2919 ok = try_decrement_rdesc_refcount (jfunc);
2920 gcc_checking_assert (ok);
2923 return cs;
2926 /* Return the target to be used in cases of impossible devirtualization. IE
2927 and target (the latter can be NULL) are dumped when dumping is enabled. */
2929 tree
2930 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
2932 if (dump_file)
2934 if (target)
2935 fprintf (dump_file,
2936 "Type inconsistent devirtualization: %s/%i->%s\n",
2937 ie->caller->name (), ie->caller->order,
2938 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
2939 else
2940 fprintf (dump_file,
2941 "No devirtualization target in %s/%i\n",
2942 ie->caller->name (), ie->caller->order);
2944 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2945 cgraph_node::get_create (new_target);
2946 return new_target;
2949 /* Try to find a destination for indirect edge IE that corresponds to a virtual
2950 call based on a formal parameter which is described by jump function JFUNC
2951 and if it can be determined, make it direct and return the direct edge.
2952 Otherwise, return NULL. CTX describes the polymorphic context that the
2953 parameter the call is based on brings along with it. */
2955 static struct cgraph_edge *
2956 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
2957 struct ipa_jump_func *jfunc,
2958 struct ipa_polymorphic_call_context ctx)
2960 tree target = NULL;
2961 bool speculative = false;
2963 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
2964 return NULL;
2966 gcc_assert (!ie->indirect_info->by_ref);
2968 /* Try to do lookup via known virtual table pointer value. */
2969 if (!ie->indirect_info->vptr_changed
2970 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
2972 tree vtable;
2973 unsigned HOST_WIDE_INT offset;
2974 tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
2975 ie->indirect_info->offset,
2976 true);
2977 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
2979 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
2980 vtable, offset);
2981 if (t)
2983 if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
2984 && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
2985 || !possible_polymorphic_call_target_p
2986 (ie, cgraph_node::get (t)))
2988 /* Do not speculate builtin_unreachable, it is stupid! */
2989 if (!ie->indirect_info->vptr_changed)
2990 target = ipa_impossible_devirt_target (ie, target);
2992 else
2994 target = t;
2995 speculative = ie->indirect_info->vptr_changed;
3001 ipa_polymorphic_call_context ie_context (ie);
3002 vec <cgraph_node *>targets;
3003 bool final;
3005 ctx.offset_by (ie->indirect_info->offset);
3006 if (ie->indirect_info->vptr_changed)
3007 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3008 ie->indirect_info->otr_type);
3009 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3010 targets = possible_polymorphic_call_targets
3011 (ie->indirect_info->otr_type,
3012 ie->indirect_info->otr_token,
3013 ctx, &final);
3014 if (final && targets.length () <= 1)
3016 speculative = false;
3017 if (targets.length () == 1)
3018 target = targets[0]->decl;
3019 else
3020 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3022 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3023 && !ie->speculative && ie->maybe_hot_p ())
3025 cgraph_node *n;
3026 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3027 ie->indirect_info->otr_token,
3028 ie->indirect_info->context);
3029 if (n)
3031 target = n->decl;
3032 speculative = true;
3036 if (target)
3038 if (!possible_polymorphic_call_target_p
3039 (ie, cgraph_node::get_create (target)))
3041 if (speculative)
3042 return NULL;
3043 target = ipa_impossible_devirt_target (ie, target);
3045 return ipa_make_edge_direct_to_target (ie, target, speculative);
3047 else
3048 return NULL;
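/* A hedged illustration (assumed values): if the aggregate jump function
   records that the vptr slot of the object holds "&_ZTV1A + 16", then
   vtable_pointer_value_to_vtable recovers the vtable VAR_DECL and offset,
   and gimple_get_virt_method_for_vtable looks up the method corresponding
   to otr_token in it.  */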
3051 /* Update the param called notes associated with NODE when CS is being inlined,
3052 assuming NODE is (potentially indirectly) inlined into CS->callee.
3053 Moreover, if the callee is discovered to be constant, create a new cgraph
3054 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3055 unless NEW_EDGES is NULL. Return true iff new edges were created. */
3057 static bool
3058 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3059 struct cgraph_node *node,
3060 vec<cgraph_edge *> *new_edges)
3062 struct ipa_edge_args *top;
3063 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3064 struct ipa_node_params *new_root_info;
3065 bool res = false;
3067 ipa_check_create_edge_args ();
3068 top = IPA_EDGE_REF (cs);
3069 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3070 ? cs->caller->global.inlined_to
3071 : cs->caller);
3073 for (ie = node->indirect_calls; ie; ie = next_ie)
3075 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3076 struct ipa_jump_func *jfunc;
3077 int param_index;
3078 cgraph_node *spec_target = NULL;
3080 next_ie = ie->next_callee;
3082 if (ici->param_index == -1)
3083 continue;
3085 /* We must check range due to calls with variable number of arguments: */
3086 if (ici->param_index >= ipa_get_cs_argument_count (top))
3088 ici->param_index = -1;
3089 continue;
3092 param_index = ici->param_index;
3093 jfunc = ipa_get_ith_jump_func (top, param_index);
3095 if (ie->speculative)
3097 struct cgraph_edge *de;
3098 struct ipa_ref *ref;
3099 ie->speculative_call_info (de, ie, ref);
3100 spec_target = de->callee;
3103 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3104 new_direct_edge = NULL;
3105 else if (ici->polymorphic)
3107 ipa_polymorphic_call_context ctx;
3108 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3109 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3111 else
3112 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3113 new_root_info);
3114 /* If speculation was removed, then we need to do nothing. */
3115 if (new_direct_edge && new_direct_edge != ie
3116 && new_direct_edge->callee == spec_target)
3118 new_direct_edge->indirect_inlining_edge = 1;
3119 top = IPA_EDGE_REF (cs);
3120 res = true;
3121 if (!new_direct_edge->speculative)
3122 continue;
3124 else if (new_direct_edge)
3126 new_direct_edge->indirect_inlining_edge = 1;
3127 if (new_direct_edge->call_stmt)
3128 new_direct_edge->call_stmt_cannot_inline_p
3129 = !gimple_check_call_matching_types (
3130 new_direct_edge->call_stmt,
3131 new_direct_edge->callee->decl, false);
3132 if (new_edges)
3134 new_edges->safe_push (new_direct_edge);
3135 res = true;
3137 top = IPA_EDGE_REF (cs);
3138 /* If speculative edge was introduced we still need to update
3139 call info of the indirect edge. */
3140 if (!new_direct_edge->speculative)
3141 continue;
3143 if (jfunc->type == IPA_JF_PASS_THROUGH
3144 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3146 if (ici->agg_contents
3147 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3148 && !ici->polymorphic)
3149 ici->param_index = -1;
3150 else
3152 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3153 if (ici->polymorphic
3154 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3155 ici->vptr_changed = true;
3158 else if (jfunc->type == IPA_JF_ANCESTOR)
3160 if (ici->agg_contents
3161 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3162 && !ici->polymorphic)
3163 ici->param_index = -1;
3164 else
3166 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3167 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3168 if (ici->polymorphic
3169 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3170 ici->vptr_changed = true;
3173 else
3174 /* Either we can find a destination for this edge now or never. */
3175 ici->param_index = -1;
3178 return res;
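/* Illustratively (assumed scenario): if an indirect call in NODE was based
   on NODE's formal #0, and CS passed the caller's formal #2 through
   unchanged as argument #0, then ICI->param_index is updated to 2 so that
   later propagation can resolve the call against the new root's
   parameters.  */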
3181 /* Recursively traverse subtree of NODE (including node) made of inlined
3182 cgraph_edges when CS has been inlined and invoke
3183 update_indirect_edges_after_inlining on all nodes and
3184 update_jump_functions_after_inlining on all non-inlined edges that lead out
3185 of this subtree. Newly discovered indirect edges will be added to
3186 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3187 created. */
3189 static bool
3190 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3191 struct cgraph_node *node,
3192 vec<cgraph_edge *> *new_edges)
3194 struct cgraph_edge *e;
3195 bool res;
3197 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3199 for (e = node->callees; e; e = e->next_callee)
3200 if (!e->inline_failed)
3201 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3202 else
3203 update_jump_functions_after_inlining (cs, e);
3204 for (e = node->indirect_calls; e; e = e->next_callee)
3205 update_jump_functions_after_inlining (cs, e);
3207 return res;
3210 /* Combine two controlled uses counts as done during inlining. */
3212 static int
3213 combine_controlled_uses_counters (int c, int d)
3215 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3216 return IPA_UNDESCRIBED_USE;
3217 else
3218 return c + d - 1;
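/* For instance (illustrative numbers): combining a caller-side count C == 3
   with a callee-side count D == 2 yields 3 + 2 - 1 == 4, because the single
   use constituted by the now-inlined call site itself disappears.  If
   either count is IPA_UNDESCRIBED_USE, the result stays undescribed.  */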
3221 /* Propagate the number of controlled users from CS->callee to the new root
3222 of the tree of inlined nodes. */
3224 static void
3225 propagate_controlled_uses (struct cgraph_edge *cs)
3227 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3228 struct cgraph_node *new_root = cs->caller->global.inlined_to
3229 ? cs->caller->global.inlined_to : cs->caller;
3230 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3231 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3232 int count, i;
3234 count = MIN (ipa_get_cs_argument_count (args),
3235 ipa_get_param_count (old_root_info));
3236 for (i = 0; i < count; i++)
3238 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3239 struct ipa_cst_ref_desc *rdesc;
3241 if (jf->type == IPA_JF_PASS_THROUGH)
3243 int src_idx, c, d;
3244 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3245 c = ipa_get_controlled_uses (new_root_info, src_idx);
3246 d = ipa_get_controlled_uses (old_root_info, i);
3248 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3249 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3250 c = combine_controlled_uses_counters (c, d);
3251 ipa_set_controlled_uses (new_root_info, src_idx, c);
3252 if (c == 0 && new_root_info->ipcp_orig_node)
3254 struct cgraph_node *n;
3255 struct ipa_ref *ref;
3256 tree t = new_root_info->known_csts[src_idx];
3258 if (t && TREE_CODE (t) == ADDR_EXPR
3259 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3260 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3261 && (ref = new_root->find_reference (n, NULL, 0)))
3263 if (dump_file)
3264 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3265 "reference from %s/%i to %s/%i.\n",
3266 xstrdup_for_dump (new_root->name ()),
3267 new_root->order,
3268 xstrdup_for_dump (n->name ()), n->order);
3269 ref->remove_reference ();
3273 else if (jf->type == IPA_JF_CONST
3274 && (rdesc = jfunc_rdesc_usable (jf)))
3276 int d = ipa_get_controlled_uses (old_root_info, i);
3277 int c = rdesc->refcount;
3278 rdesc->refcount = combine_controlled_uses_counters (c, d);
3279 if (rdesc->refcount == 0)
3281 tree cst = ipa_get_jf_constant (jf);
3282 struct cgraph_node *n;
3283 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3284 && TREE_CODE (TREE_OPERAND (cst, 0))
3285 == FUNCTION_DECL);
3286 n = cgraph_node::get (TREE_OPERAND (cst, 0));
3287 if (n)
3289 struct cgraph_node *clone;
3290 bool ok;
3291 ok = remove_described_reference (n, rdesc);
3292 gcc_checking_assert (ok);
3294 clone = cs->caller;
3295 while (clone->global.inlined_to
3296 && clone != rdesc->cs->caller
3297 && IPA_NODE_REF (clone)->ipcp_orig_node)
3299 struct ipa_ref *ref;
3300 ref = clone->find_reference (n, NULL, 0);
3301 if (ref)
3303 if (dump_file)
3304 fprintf (dump_file, "ipa-prop: Removing "
3305 "cloning-created reference "
3306 "from %s/%i to %s/%i.\n",
3307 xstrdup_for_dump (clone->name ()),
3308 clone->order,
3309 xstrdup_for_dump (n->name ()),
3310 n->order);
3311 ref->remove_reference ();
3313 clone = clone->callers->caller;
3320 for (i = ipa_get_param_count (old_root_info);
3321 i < ipa_get_cs_argument_count (args);
3322 i++)
3324 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3326 if (jf->type == IPA_JF_CONST)
3328 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3329 if (rdesc)
3330 rdesc->refcount = IPA_UNDESCRIBED_USE;
3332 else if (jf->type == IPA_JF_PASS_THROUGH)
3333 ipa_set_controlled_uses (new_root_info,
3334 jf->value.pass_through.formal_id,
3335 IPA_UNDESCRIBED_USE);
3339 /* Update jump functions and call note functions on inlining the call site CS.
3340 CS is expected to lead to a node already cloned by
3341 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
3342 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
3343 created. */
3345 bool
3346 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3347 vec<cgraph_edge *> *new_edges)
3349 bool changed;
3350 /* Do nothing if the preparation phase has not been carried out yet
3351 (i.e. during early inlining). */
3352 if (!ipa_node_params_sum)
3353 return false;
3354 gcc_assert (ipa_edge_args_vector);
3356 propagate_controlled_uses (cs);
3357 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3359 return changed;
3362 /* Frees all dynamically allocated structures that the argument info points
3363 to. */
3365 void
3366 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3368 vec_free (args->jump_functions);
3369 memset (args, 0, sizeof (*args));
3372 /* Free all ipa_edge structures. */
3374 void
3375 ipa_free_all_edge_args (void)
3377 int i;
3378 struct ipa_edge_args *args;
3380 if (!ipa_edge_args_vector)
3381 return;
3383 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
3384 ipa_free_edge_args_substructures (args);
3386 vec_free (ipa_edge_args_vector);
3389 /* Frees all dynamically allocated structures that the param info points
3390 to. */
3392 ipa_node_params::~ipa_node_params ()
3394 descriptors.release ();
3395 free (lattices);
3396 /* Lattice values and their sources are deallocated with their allocation
3397 pool. */
3398 known_contexts.release ();
3400 lattices = NULL;
3401 ipcp_orig_node = NULL;
3402 analysis_done = 0;
3403 node_enqueued = 0;
3404 do_clone_for_all_contexts = 0;
3405 is_all_contexts_clone = 0;
3406 node_dead = 0;
3409 /* Free all ipa_node_params structures. */
3411 void
3412 ipa_free_all_node_params (void)
3414 delete ipa_node_params_sum;
3415 ipa_node_params_sum = NULL;
3418 /* Grow ipcp_transformations if necessary. */
3420 void
3421 ipcp_grow_transformations_if_necessary (void)
3423 if (vec_safe_length (ipcp_transformations)
3424 <= (unsigned) symtab->cgraph_max_uid)
3425 vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3428 /* Set the aggregate replacements of NODE to be AGGVALS. */
3430 void
3431 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3432 struct ipa_agg_replacement_value *aggvals)
3434 ipcp_grow_transformations_if_necessary ();
3435 (*ipcp_transformations)[node->uid].agg_values = aggvals;
3438 /* Hook that is called by cgraph.c when an edge is removed. */
3440 static void
3441 ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
3443 struct ipa_edge_args *args;
3445 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
3446 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
3447 return;
3449 args = IPA_EDGE_REF (cs);
3450 if (args->jump_functions)
3452 struct ipa_jump_func *jf;
3453 int i;
3454 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3456 struct ipa_cst_ref_desc *rdesc;
3457 try_decrement_rdesc_refcount (jf);
3458 if (jf->type == IPA_JF_CONST
3459 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3460 && rdesc->cs == cs)
3461 rdesc->cs = NULL;
3465 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
3468 /* Hook that is called by cgraph.c when an edge is duplicated. */
3470 static void
3471 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
3472 void *)
3474 struct ipa_edge_args *old_args, *new_args;
3475 unsigned int i;
3477 ipa_check_create_edge_args ();
3479 old_args = IPA_EDGE_REF (src);
3480 new_args = IPA_EDGE_REF (dst);
3482 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3483 if (old_args->polymorphic_call_contexts)
3484 new_args->polymorphic_call_contexts
3485 = vec_safe_copy (old_args->polymorphic_call_contexts);
3487 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3489 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3490 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3492 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3494 if (src_jf->type == IPA_JF_CONST)
3496 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3498 if (!src_rdesc)
3499 dst_jf->value.constant.rdesc = NULL;
3500 else if (src->caller == dst->caller)
3502 struct ipa_ref *ref;
3503 symtab_node *n = cgraph_node_for_jfunc (src_jf);
3504 gcc_checking_assert (n);
3505 ref = src->caller->find_reference (n, src->call_stmt,
3506 src->lto_stmt_uid);
3507 gcc_checking_assert (ref);
3508 dst->caller->clone_reference (ref, ref->stmt);
3510 gcc_checking_assert (ipa_refdesc_pool);
3511 struct ipa_cst_ref_desc *dst_rdesc
3512 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3513 dst_rdesc->cs = dst;
3514 dst_rdesc->refcount = src_rdesc->refcount;
3515 dst_rdesc->next_duplicate = NULL;
3516 dst_jf->value.constant.rdesc = dst_rdesc;
3518 else if (src_rdesc->cs == src)
3520 struct ipa_cst_ref_desc *dst_rdesc;
3521 gcc_checking_assert (ipa_refdesc_pool);
3522 dst_rdesc
3523 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3524 dst_rdesc->cs = dst;
3525 dst_rdesc->refcount = src_rdesc->refcount;
3526 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3527 src_rdesc->next_duplicate = dst_rdesc;
3528 dst_jf->value.constant.rdesc = dst_rdesc;
3530 else
3532 struct ipa_cst_ref_desc *dst_rdesc;
3533 /* This can happen during inlining, when a JFUNC can refer to a
3534 reference taken in a function up in the tree of inline clones.
3535 We need to find the duplicate that refers to our tree of
3536 inline clones. */
3538 gcc_assert (dst->caller->global.inlined_to);
3539 for (dst_rdesc = src_rdesc->next_duplicate;
3540 dst_rdesc;
3541 dst_rdesc = dst_rdesc->next_duplicate)
3543 struct cgraph_node *top;
3544 top = dst_rdesc->cs->caller->global.inlined_to
3545 ? dst_rdesc->cs->caller->global.inlined_to
3546 : dst_rdesc->cs->caller;
3547 if (dst->caller->global.inlined_to == top)
3548 break;
3550 gcc_assert (dst_rdesc);
3551 dst_jf->value.constant.rdesc = dst_rdesc;
3554 else if (dst_jf->type == IPA_JF_PASS_THROUGH
3555 && src->caller == dst->caller)
3557 struct cgraph_node *inline_root = dst->caller->global.inlined_to
3558 ? dst->caller->global.inlined_to : dst->caller;
3559 struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3560 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3562 int c = ipa_get_controlled_uses (root_info, idx);
3563 if (c != IPA_UNDESCRIBED_USE)
3565 c++;
3566 ipa_set_controlled_uses (root_info, idx, c);
3572 /* Analyze a function newly added to the call graph. */
3574 static void
3575 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3577 if (node->has_gimple_body_p ())
3578 ipa_analyze_node (node);
3581 /* Hook that is called by summary when a node is duplicated. */
3583 void
3584 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3585 ipa_node_params *old_info,
3586 ipa_node_params *new_info)
3588 ipa_agg_replacement_value *old_av, *new_av;
3590 new_info->descriptors = old_info->descriptors.copy ();
3591 new_info->lattices = NULL;
3592 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3594 new_info->analysis_done = old_info->analysis_done;
3595 new_info->node_enqueued = old_info->node_enqueued;
3597 old_av = ipa_get_agg_replacements_for_node (src);
3598 if (old_av)
3600 new_av = NULL;
3601 while (old_av)
3603 struct ipa_agg_replacement_value *v;
3605 v = ggc_alloc<ipa_agg_replacement_value> ();
3606 memcpy (v, old_av, sizeof (*v));
3607 v->next = new_av;
3608 new_av = v;
3609 old_av = old_av->next;
3611 ipa_set_node_agg_value_chain (dst, new_av);
3614 ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
3616 if (src_trans && vec_safe_length (src_trans->alignments) > 0)
3618 ipcp_grow_transformations_if_necessary ();
3619 src_trans = ipcp_get_transformation_summary (src);
3620 const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
3621 vec<ipa_alignment, va_gc> *&dst_alignments
3622 = ipcp_get_transformation_summary (dst)->alignments;
3623 vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
3624 for (unsigned i = 0; i < src_alignments->length (); ++i)
3625 dst_alignments->quick_push ((*src_alignments)[i]);
3629 /* Register our cgraph hooks if they are not already there. */
3631 void
3632 ipa_register_cgraph_hooks (void)
3634 ipa_check_create_node_params ();
3636 if (!edge_removal_hook_holder)
3637 edge_removal_hook_holder =
3638 symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3639 if (!edge_duplication_hook_holder)
3640 edge_duplication_hook_holder =
3641 symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3642 function_insertion_hook_holder =
3643 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3646 /* Unregister our cgraph hooks if they are still there. */
3648 static void
3649 ipa_unregister_cgraph_hooks (void)
3651 symtab->remove_edge_removal_hook (edge_removal_hook_holder);
3652 edge_removal_hook_holder = NULL;
3653 symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
3654 edge_duplication_hook_holder = NULL;
3655 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3656 function_insertion_hook_holder = NULL;
3659 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3660 longer needed after ipa-cp. */
3662 void
3663 ipa_free_all_structures_after_ipa_cp (void)
3665 if (!optimize && !in_lto_p)
3667 ipa_free_all_edge_args ();
3668 ipa_free_all_node_params ();
3669 free_alloc_pool (ipcp_sources_pool);
3670 free_alloc_pool (ipcp_cst_values_pool);
3671 free_alloc_pool (ipcp_poly_ctx_values_pool);
3672 free_alloc_pool (ipcp_agg_lattice_pool);
3673 ipa_unregister_cgraph_hooks ();
3674 if (ipa_refdesc_pool)
3675 free_alloc_pool (ipa_refdesc_pool);
3679 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3680 longer needed after indirect inlining. */
3682 void
3683 ipa_free_all_structures_after_iinln (void)
3685 ipa_free_all_edge_args ();
3686 ipa_free_all_node_params ();
3687 ipa_unregister_cgraph_hooks ();
3688 if (ipcp_sources_pool)
3689 free_alloc_pool (ipcp_sources_pool);
3690 if (ipcp_cst_values_pool)
3691 free_alloc_pool (ipcp_cst_values_pool);
3692 if (ipcp_poly_ctx_values_pool)
3693 free_alloc_pool (ipcp_poly_ctx_values_pool);
3694 if (ipcp_agg_lattice_pool)
3695 free_alloc_pool (ipcp_agg_lattice_pool);
3696 if (ipa_refdesc_pool)
3697 free_alloc_pool (ipa_refdesc_pool);
3700 /* Print the ipa_tree_map data structure (parameter descriptors) of
3701 function NODE to F. */
3703 void
3704 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3706 int i, count;
3707 struct ipa_node_params *info;
3709 if (!node->definition)
3710 return;
3711 info = IPA_NODE_REF (node);
3712 fprintf (f, " function %s/%i parameter descriptors:\n",
3713 node->name (), node->order);
3714 count = ipa_get_param_count (info);
3715 for (i = 0; i < count; i++)
3717 int c;
3719 fprintf (f, " ");
3720 ipa_dump_param (f, info, i);
3721 if (ipa_is_param_used (info, i))
3722 fprintf (f, " used");
3723 c = ipa_get_controlled_uses (info, i);
3724 if (c == IPA_UNDESCRIBED_USE)
3725 fprintf (f, " undescribed_use");
3726 else
3727 fprintf (f, " controlled_uses=%i", c);
3728 fprintf (f, "\n");
3732 /* Print ipa_tree_map data structures of all functions in the
3733 callgraph to F. */
3735 void
3736 ipa_print_all_params (FILE * f)
3738 struct cgraph_node *node;
3740 fprintf (f, "\nFunction parameters:\n");
3741 FOR_EACH_FUNCTION (node)
3742 ipa_print_node_params (f, node);
3745 /* Return a heap-allocated vector containing the formal parameters of FNDECL. */
3747 vec<tree>
3748 ipa_get_vector_of_formal_parms (tree fndecl)
3750 vec<tree> args;
3751 int count;
3752 tree parm;
3754 gcc_assert (!flag_wpa);
3755 count = count_formal_params (fndecl);
3756 args.create (count);
3757 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3758 args.quick_push (parm);
3760 return args;
3763 /* Return a heap-allocated vector containing the types of the formal
3764 parameters of function type FNTYPE. */
3766 vec<tree>
3767 ipa_get_vector_of_formal_parm_types (tree fntype)
3769 vec<tree> types;
3770 int count = 0;
3771 tree t;
3773 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3774 count++;
3776 types.create (count);
3777 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3778 types.quick_push (TREE_VALUE (t));
3780 return types;
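/* A hedged usage sketch (assumed context; FNTYPE is hypothetical): the
   returned vector is heap allocated and the caller must release it.  */
#if 0
  vec<tree> types = ipa_get_vector_of_formal_parm_types (fntype);
  int i;
  tree t;
  FOR_EACH_VEC_ELT (types, i, t)
    debug_tree (t);	/* Dump each formal parameter type.  */
  types.release ();
#endif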
3783 /* Modify the function declaration FNDECL and its type according to the plan in
3784 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3785 to reflect the actual parameters being modified which are determined by the
3786 base_index field. */
3788 void
3789 ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
3791 vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
3792 tree orig_type = TREE_TYPE (fndecl);
3793 tree old_arg_types = TYPE_ARG_TYPES (orig_type);
3795 /* The following test is an ugly hack, some functions simply don't have any
3796 arguments in their type. This is probably a bug but well... */
3797 bool care_for_types = (old_arg_types != NULL_TREE);
3798 bool last_parm_void;
3799 vec<tree> otypes;
3800 if (care_for_types)
3802 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3803 == void_type_node);
3804 otypes = ipa_get_vector_of_formal_parm_types (orig_type);
3805 if (last_parm_void)
3806 gcc_assert (oparms.length () + 1 == otypes.length ());
3807 else
3808 gcc_assert (oparms.length () == otypes.length ());
3810 else
3812 last_parm_void = false;
3813 otypes.create (0);
3816 int len = adjustments.length ();
3817 tree *link = &DECL_ARGUMENTS (fndecl);
3818 tree new_arg_types = NULL;
3819 for (int i = 0; i < len; i++)
3821 struct ipa_parm_adjustment *adj;
3822 gcc_assert (link);
3824 adj = &adjustments[i];
3825 tree parm;
3826 if (adj->op == IPA_PARM_OP_NEW)
3827 parm = NULL;
3828 else
3829 parm = oparms[adj->base_index];
3830 adj->base = parm;
3832 if (adj->op == IPA_PARM_OP_COPY)
3834 if (care_for_types)
3835 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3836 new_arg_types);
3837 *link = parm;
3838 link = &DECL_CHAIN (parm);
3840 else if (adj->op != IPA_PARM_OP_REMOVE)
3842 tree new_parm;
3843 tree ptype;
3845 if (adj->by_ref)
3846 ptype = build_pointer_type (adj->type);
3847 else
3849 ptype = adj->type;
3850 if (is_gimple_reg_type (ptype))
3852 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
3853 if (TYPE_ALIGN (ptype) < malign)
3854 ptype = build_aligned_type (ptype, malign);
3858 if (care_for_types)
3859 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3861 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3862 ptype);
3863 const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
3864 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
3865 DECL_ARTIFICIAL (new_parm) = 1;
3866 DECL_ARG_TYPE (new_parm) = ptype;
3867 DECL_CONTEXT (new_parm) = fndecl;
3868 TREE_USED (new_parm) = 1;
3869 DECL_IGNORED_P (new_parm) = 1;
3870 layout_decl (new_parm, 0);
3872 if (adj->op == IPA_PARM_OP_NEW)
3873 adj->base = NULL;
3874 else
3875 adj->base = parm;
3876 adj->new_decl = new_parm;
3878 *link = new_parm;
3879 link = &DECL_CHAIN (new_parm);
3883 *link = NULL_TREE;
3885 tree new_reversed = NULL;
3886 if (care_for_types)
3888 new_reversed = nreverse (new_arg_types);
3889 if (last_parm_void)
3891 if (new_reversed)
3892 TREE_CHAIN (new_arg_types) = void_list_node;
3893 else
3894 new_reversed = void_list_node;
3898 /* Use copy_node to preserve as much as possible from the original type
3899 (debug info, attribute lists etc.). The exception is that a
3900 METHOD_TYPE must have a THIS argument; when we are asked to remove
3901 it, we need to build a new FUNCTION_TYPE instead. */
3903 tree new_type = NULL;
3904 if (TREE_CODE (orig_type) != METHOD_TYPE
3905 || (adjustments[0].op == IPA_PARM_OP_COPY
3906 && adjustments[0].base_index == 0))
3908 new_type = build_distinct_type_copy (orig_type);
3909 TYPE_ARG_TYPES (new_type) = new_reversed;
3911 else
3913 new_type
3914 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3915 new_reversed));
3916 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3917 DECL_VINDEX (fndecl) = NULL_TREE;
3920 /* When signature changes, we need to clear builtin info. */
3921 if (DECL_BUILT_IN (fndecl))
3923 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3924 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3927 TREE_TYPE (fndecl) = new_type;
3928 DECL_VIRTUAL_P (fndecl) = 0;
3929 DECL_LANG_SPECIFIC (fndecl) = NULL;
3930 otypes.release ();
3931 oparms.release ();
3934 /* Modify actual arguments of the function call STMT as indicated in
3935 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3936 Otherwise it must contain the corresponding call graph edge. */
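/* A hedged example of the rewrite performed below (names are
   illustrative): with adjustments { copy of argument 1, new by-value
   argument loaded from byte offset 4 of argument 0 }, the call

     foo (&s, i);

   becomes something like

     tmp_1 = MEM[(int *)&s + 4B];
     foo.part (i, tmp_1);

   where "foo.part" merely stands for whatever callee decl the
   adjustments belong to.  */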
3938 void
3939 ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
3940 ipa_parm_adjustment_vec adjustments)
3942 struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
3943 vec<tree> vargs;
3944 vec<tree, va_gc> **debug_args = NULL;
3945 gcall *new_stmt;
3946 gimple_stmt_iterator gsi, prev_gsi;
3947 tree callee_decl;
3948 int i, len;
3950 len = adjustments.length ();
3951 vargs.create (len);
3952 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3953 current_node->remove_stmt_references (stmt);
3955 gsi = gsi_for_stmt (stmt);
3956 prev_gsi = gsi;
3957 gsi_prev (&prev_gsi);
3958 for (i = 0; i < len; i++)
3960 struct ipa_parm_adjustment *adj;
3962 adj = &adjustments[i];
3964 if (adj->op == IPA_PARM_OP_COPY)
3966 tree arg = gimple_call_arg (stmt, adj->base_index);
3968 vargs.quick_push (arg);
3970 else if (adj->op != IPA_PARM_OP_REMOVE)
3972 tree expr, base, off;
3973 location_t loc;
3974 unsigned int deref_align = 0;
3975 bool deref_base = false;
3977 /* When we create a new parameter out of the value of the old one, we
3978 can do the following kinds of transformations:
3980 - A scalar passed by reference is converted to a scalar passed by
3981 value. (adj->by_ref is false and the type of the original
3982 actual argument is a pointer to a scalar).
3984 - A part of an aggregate is passed instead of the whole aggregate.
3985 The part can be passed either by value or by reference; this is
3986 determined by the value of adj->by_ref. Moreover, the code below
3987 handles both situations when the original aggregate is passed by
3988 value (its type is not a pointer) and when it is passed by
3989 reference (it is a pointer to an aggregate).
3991 When the new argument is passed by reference (adj->by_ref is true)
3992 it must be a part of an aggregate and therefore we form it by
3993 simply taking the address of a reference inside the original
3994 aggregate. */
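/* For instance (illustrative only): if the original actual argument
   is "&s" and the new parameter describes the scalar at byte offset 4
   of the aggregate, the code below builds MEM[(type *)&s + 4B] when
   passing by value, or the address of that reference when passing by
   reference.  */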
3996 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3997 base = gimple_call_arg (stmt, adj->base_index);
3998 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3999 : EXPR_LOCATION (base);
4001 if (TREE_CODE (base) != ADDR_EXPR
4002 && POINTER_TYPE_P (TREE_TYPE (base)))
4003 off = build_int_cst (adj->alias_ptr_type,
4004 adj->offset / BITS_PER_UNIT);
4005 else
4007 HOST_WIDE_INT base_offset;
4008 tree prev_base;
4009 bool addrof;
4011 if (TREE_CODE (base) == ADDR_EXPR)
4013 base = TREE_OPERAND (base, 0);
4014 addrof = true;
4016 else
4017 addrof = false;
4018 prev_base = base;
4019 base = get_addr_base_and_unit_offset (base, &base_offset);
4020 /* Aggregate arguments can have non-invariant addresses. */
4021 if (!base)
4023 base = build_fold_addr_expr (prev_base);
4024 off = build_int_cst (adj->alias_ptr_type,
4025 adj->offset / BITS_PER_UNIT);
4027 else if (TREE_CODE (base) == MEM_REF)
4029 if (!addrof)
4031 deref_base = true;
4032 deref_align = TYPE_ALIGN (TREE_TYPE (base));
4034 off = build_int_cst (adj->alias_ptr_type,
4035 base_offset
4036 + adj->offset / BITS_PER_UNIT);
4037 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
4038 off);
4039 base = TREE_OPERAND (base, 0);
4041 else
4043 off = build_int_cst (adj->alias_ptr_type,
4044 base_offset
4045 + adj->offset / BITS_PER_UNIT);
4046 base = build_fold_addr_expr (base);
4050 if (!adj->by_ref)
4052 tree type = adj->type;
4053 unsigned int align;
4054 unsigned HOST_WIDE_INT misalign;
4056 if (deref_base)
4058 align = deref_align;
4059 misalign = 0;
4061 else
4063 get_pointer_alignment_1 (base, &align, &misalign);
4064 if (TYPE_ALIGN (type) > align)
4065 align = TYPE_ALIGN (type);
4067 misalign += (offset_int::from (off, SIGNED).to_short_addr ()
4068 * BITS_PER_UNIT);
4069 misalign = misalign & (align - 1);
4070 if (misalign != 0)
4071 align = (misalign & -misalign);
4072 if (align < TYPE_ALIGN (type))
4073 type = build_aligned_type (type, align);
4074 base = force_gimple_operand_gsi (&gsi, base,
4075 true, NULL, true, GSI_SAME_STMT);
4076 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
4077 /* If expr is not a valid gimple call argument, emit
4078 a load into a temporary. */
4079 if (is_gimple_reg_type (TREE_TYPE (expr)))
4081 gimple tem = gimple_build_assign (NULL_TREE, expr);
4082 if (gimple_in_ssa_p (cfun))
4084 gimple_set_vuse (tem, gimple_vuse (stmt));
4085 expr = make_ssa_name (TREE_TYPE (expr), tem);
4087 else
4088 expr = create_tmp_reg (TREE_TYPE (expr));
4089 gimple_assign_set_lhs (tem, expr);
4090 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
4093 else
4095 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
4096 expr = build_fold_addr_expr (expr);
4097 expr = force_gimple_operand_gsi (&gsi, expr,
4098 true, NULL, true, GSI_SAME_STMT);
4100 vargs.quick_push (expr);
4102 if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
4104 unsigned int ix;
4105 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
4106 gimple def_temp;
4108 arg = gimple_call_arg (stmt, adj->base_index);
4109 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
4111 if (!fold_convertible_p (TREE_TYPE (origin), arg))
4112 continue;
4113 arg = fold_convert_loc (gimple_location (stmt),
4114 TREE_TYPE (origin), arg);
4116 if (debug_args == NULL)
4117 debug_args = decl_debug_args_insert (callee_decl);
4118 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
4119 if (ddecl == origin)
4121 ddecl = (**debug_args)[ix + 1];
4122 break;
4124 if (ddecl == NULL)
4126 ddecl = make_node (DEBUG_EXPR_DECL);
4127 DECL_ARTIFICIAL (ddecl) = 1;
4128 TREE_TYPE (ddecl) = TREE_TYPE (origin);
4129 DECL_MODE (ddecl) = DECL_MODE (origin);
4131 vec_safe_push (*debug_args, origin);
4132 vec_safe_push (*debug_args, ddecl);
4134 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
4135 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
4139 if (dump_file && (dump_flags & TDF_DETAILS))
4141 fprintf (dump_file, "replacing stmt:");
4142 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
4145 new_stmt = gimple_build_call_vec (callee_decl, vargs);
4146 vargs.release ();
4147 if (gimple_call_lhs (stmt))
4148 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
4150 gimple_set_block (new_stmt, gimple_block (stmt));
4151 if (gimple_has_location (stmt))
4152 gimple_set_location (new_stmt, gimple_location (stmt));
4153 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
4154 gimple_call_copy_flags (new_stmt, stmt);
4155 if (gimple_in_ssa_p (cfun))
4157 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4158 if (gimple_vdef (stmt))
4160 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4161 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
4165 if (dump_file && (dump_flags & TDF_DETAILS))
4167 fprintf (dump_file, "with stmt:");
4168 print_gimple_stmt (dump_file, new_stmt, 0, 0);
4169 fprintf (dump_file, "\n");
4171 gsi_replace (&gsi, new_stmt, true);
4172 if (cs)
4173 cs->set_call_stmt (new_stmt);
4176 current_node->record_stmt_references (gsi_stmt (gsi));
4177 gsi_prev (&gsi);
4179 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
4182 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4183 so. ADJUSTMENTS is the vector of adjustments. CONVERT specifies
4184 whether the function should care about type incompatibility between the
4185 current and new expressions. If it is false, the function will leave
4186 incompatibility issues to the caller. Return true iff the expression
4187 was modified. */
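/* E.g. (an assumed scenario): when parameter "p" was replaced by a
   scalar reduction of "*p", an occurrence of the reduced part of "*p"
   in the body is rewritten to the new decl directly (by_ref false) or
   to a dereference of it (by_ref true), wrapped in a
   VIEW_CONVERT_EXPR if CONVERT is set and the types differ.  */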
4189 bool
4190 ipa_modify_expr (tree *expr, bool convert,
4191 ipa_parm_adjustment_vec adjustments)
4193 struct ipa_parm_adjustment *cand
4194 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4195 if (!cand)
4196 return false;
4198 tree src;
4199 if (cand->by_ref)
4200 src = build_simple_mem_ref (cand->new_decl);
4201 else
4202 src = cand->new_decl;
4204 if (dump_file && (dump_flags & TDF_DETAILS))
4206 fprintf (dump_file, "About to replace expr ");
4207 print_generic_expr (dump_file, *expr, 0);
4208 fprintf (dump_file, " with ");
4209 print_generic_expr (dump_file, src, 0);
4210 fprintf (dump_file, "\n");
4213 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4215 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4216 *expr = vce;
4218 else
4219 *expr = src;
4220 return true;
4223 /* If T is an SSA_NAME, return NULL if it is not a default def or
4224 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4225 the base variable is always returned, regardless of whether it is a
4226 default def. Return T if it is not an SSA_NAME. */
4228 static tree
4229 get_ssa_base_param (tree t, bool ignore_default_def)
4231 if (TREE_CODE (t) == SSA_NAME)
4233 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4234 return SSA_NAME_VAR (t);
4235 else
4236 return NULL_TREE;
4238 return t;
4241 /* Given an expression, return an adjustment entry specifying the
4242 transformation to be done on EXPR. If no suitable adjustment entry
4243 was found, returns NULL.
4245 If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
4246 default def, otherwise bail on them.
4248 If CONVERT is non-NULL, this function will set *CONVERT if the
4249 expression provided is a component reference. ADJUSTMENTS is the
4250 adjustments vector. */
4252 ipa_parm_adjustment *
4253 ipa_get_adjustment_candidate (tree **expr, bool *convert,
4254 ipa_parm_adjustment_vec adjustments,
4255 bool ignore_default_def)
4257 if (TREE_CODE (**expr) == BIT_FIELD_REF
4258 || TREE_CODE (**expr) == IMAGPART_EXPR
4259 || TREE_CODE (**expr) == REALPART_EXPR)
4261 *expr = &TREE_OPERAND (**expr, 0);
4262 if (convert)
4263 *convert = true;
4266 HOST_WIDE_INT offset, size, max_size;
4267 tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
4268 if (!base || size == -1 || max_size == -1)
4269 return NULL;
4271 if (TREE_CODE (base) == MEM_REF)
4273 offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
4274 base = TREE_OPERAND (base, 0);
4277 base = get_ssa_base_param (base, ignore_default_def);
4278 if (!base || TREE_CODE (base) != PARM_DECL)
4279 return NULL;
4281 struct ipa_parm_adjustment *cand = NULL;
4282 unsigned int len = adjustments.length ();
4283 for (unsigned i = 0; i < len; i++)
4285 struct ipa_parm_adjustment *adj = &adjustments[i];
4287 if (adj->base == base
4288 && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
4290 cand = adj;
4291 break;
4295 if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
4296 return NULL;
4297 return cand;
4300 /* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4302 static bool
4303 index_in_adjustments_multiple_times_p (int base_index,
4304 ipa_parm_adjustment_vec adjustments)
4306 int i, len = adjustments.length ();
4307 bool one = false;
4309 for (i = 0; i < len; i++)
4311 struct ipa_parm_adjustment *adj;
4312 adj = &adjustments[i];
4314 if (adj->base_index == base_index)
4316 if (one)
4317 return true;
4318 else
4319 one = true;
4322 return false;
4326 /* Return adjustments that should have the same effect on function parameters
4327 and call arguments as if they were first changed according to adjustments in
4328 INNER and then by adjustments in OUTER. */
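/* A sketch of the composition (assumed example): if INNER turns
   foo (a, b, c) into foo (a, c) via { COPY base_index 0,
   COPY base_index 2, REMOVE base_index 1 } and OUTER then turns
   foo (a, c) into foo (c) via { REMOVE base_index 0,
   COPY base_index 1 }, the combined vector comes out as
   { REMOVE, COPY base_index 2, REMOVE base_index 1 }, i.e. only the
   original third parameter survives.  */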
4330 ipa_parm_adjustment_vec
4331 ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
4332 ipa_parm_adjustment_vec outer)
4334 int i, outlen = outer.length ();
4335 int inlen = inner.length ();
4336 int removals = 0;
4337 ipa_parm_adjustment_vec adjustments, tmp;
4339 tmp.create (inlen);
4340 for (i = 0; i < inlen; i++)
4342 struct ipa_parm_adjustment *n;
4343 n = &inner[i];
4345 if (n->op == IPA_PARM_OP_REMOVE)
4346 removals++;
4347 else
4349 /* FIXME: Handling of new arguments is not implemented yet. */
4350 gcc_assert (n->op != IPA_PARM_OP_NEW);
4351 tmp.quick_push (*n);
4355 adjustments.create (outlen + removals);
4356 for (i = 0; i < outlen; i++)
4358 struct ipa_parm_adjustment r;
4359 struct ipa_parm_adjustment *out = &outer[i];
4360 struct ipa_parm_adjustment *in = &tmp[out->base_index];
4362 memset (&r, 0, sizeof (r));
4363 gcc_assert (in->op != IPA_PARM_OP_REMOVE);
4364 if (out->op == IPA_PARM_OP_REMOVE)
4366 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
4368 r.op = IPA_PARM_OP_REMOVE;
4369 adjustments.quick_push (r);
4371 continue;
4373 else
4375 /* FIXME: Handling of new arguments is not implemented yet. */
4376 gcc_assert (out->op != IPA_PARM_OP_NEW);
4379 r.base_index = in->base_index;
4380 r.type = out->type;
4382 /* FIXME: Create nonlocal value too. */
4384 if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
4385 r.op = IPA_PARM_OP_COPY;
4386 else if (in->op == IPA_PARM_OP_COPY)
4387 r.offset = out->offset;
4388 else if (out->op == IPA_PARM_OP_COPY)
4389 r.offset = in->offset;
4390 else
4391 r.offset = in->offset + out->offset;
4392 adjustments.quick_push (r);
4395 for (i = 0; i < inlen; i++)
4397 struct ipa_parm_adjustment *n = &inner[i];
4399 if (n->op == IPA_PARM_OP_REMOVE)
4400 adjustments.quick_push (*n);
4403 tmp.release ();
4404 return adjustments;
4407 /* Dump the adjustments in the vector ADJUSTMENTS to FILE in a
4408 human-friendly way, assuming they are meant to be applied to FNDECL. */
4410 void
4411 ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
4412 tree fndecl)
4414 int i, len = adjustments.length ();
4415 bool first = true;
4416 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
4418 fprintf (file, "IPA param adjustments: ");
4419 for (i = 0; i < len; i++)
4421 struct ipa_parm_adjustment *adj;
4422 adj = &adjustments[i];
4424 if (!first)
4425 fprintf (file, " ");
4426 else
4427 first = false;
4429 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
4430 print_generic_expr (file, parms[adj->base_index], 0);
4431 if (adj->base)
4433 fprintf (file, ", base: ");
4434 print_generic_expr (file, adj->base, 0);
4436 if (adj->new_decl)
4438 fprintf (file, ", new_decl: ");
4439 print_generic_expr (file, adj->new_decl, 0);
4441 if (adj->new_ssa_base)
4443 fprintf (file, ", new_ssa_base: ");
4444 print_generic_expr (file, adj->new_ssa_base, 0);
4447 if (adj->op == IPA_PARM_OP_COPY)
4448 fprintf (file, ", copy_param");
4449 else if (adj->op == IPA_PARM_OP_REMOVE)
4450 fprintf (file, ", remove_param");
4451 else
4452 fprintf (file, ", offset %li", (long) adj->offset);
4453 if (adj->by_ref)
4454 fprintf (file, ", by_ref");
4455 print_node_brief (file, ", type: ", adj->type, 0);
4456 fprintf (file, "\n");
4458 parms.release ();
4461 /* Dump the linked list of aggregate replacement values AV to F. */
4463 void
4464 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4466 bool comma = false;
4467 fprintf (f, " Aggregate replacements:");
4468 for (; av; av = av->next)
4470 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4471 av->index, av->offset);
4472 print_generic_expr (f, av->value, 0);
4473 comma = true;
4475 fprintf (f, "\n");
4478 /* Stream out jump function JUMP_FUNC to OB. */
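/* The layout, mirrored by ipa_read_jump_function below: the jump
   function type, a type-specific payload, the number of aggregate
   items (with a by_ref bit when nonzero) followed by the items
   themselves, and finally an alignment-known bit with the align and
   misalign values when set.  */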
4480 static void
4481 ipa_write_jump_function (struct output_block *ob,
4482 struct ipa_jump_func *jump_func)
4484 struct ipa_agg_jf_item *item;
4485 struct bitpack_d bp;
4486 int i, count;
4488 streamer_write_uhwi (ob, jump_func->type);
4489 switch (jump_func->type)
4491 case IPA_JF_UNKNOWN:
4492 break;
4493 case IPA_JF_CONST:
4494 gcc_assert (
4495 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4496 stream_write_tree (ob, jump_func->value.constant.value, true);
4497 break;
4498 case IPA_JF_PASS_THROUGH:
4499 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4500 if (jump_func->value.pass_through.operation == NOP_EXPR)
4502 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4503 bp = bitpack_create (ob->main_stream);
4504 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4505 streamer_write_bitpack (&bp);
4507 else
4509 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4510 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4512 break;
4513 case IPA_JF_ANCESTOR:
4514 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4515 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4516 bp = bitpack_create (ob->main_stream);
4517 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4518 streamer_write_bitpack (&bp);
4519 break;
4522 count = vec_safe_length (jump_func->agg.items);
4523 streamer_write_uhwi (ob, count);
4524 if (count)
4526 bp = bitpack_create (ob->main_stream);
4527 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4528 streamer_write_bitpack (&bp);
4531 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4533 streamer_write_uhwi (ob, item->offset);
4534 stream_write_tree (ob, item->value, true);
4537 bp = bitpack_create (ob->main_stream);
4538 bp_pack_value (&bp, jump_func->alignment.known, 1);
4539 streamer_write_bitpack (&bp);
4540 if (jump_func->alignment.known)
4542 streamer_write_uhwi (ob, jump_func->alignment.align);
4543 streamer_write_uhwi (ob, jump_func->alignment.misalign);
4547 /* Read in jump function JUMP_FUNC from IB. */
4549 static void
4550 ipa_read_jump_function (struct lto_input_block *ib,
4551 struct ipa_jump_func *jump_func,
4552 struct cgraph_edge *cs,
4553 struct data_in *data_in)
4555 enum jump_func_type jftype;
4556 enum tree_code operation;
4557 int i, count;
4559 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4560 switch (jftype)
4562 case IPA_JF_UNKNOWN:
4563 ipa_set_jf_unknown (jump_func);
4564 break;
4565 case IPA_JF_CONST:
4566 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4567 break;
4568 case IPA_JF_PASS_THROUGH:
4569 operation = (enum tree_code) streamer_read_uhwi (ib);
4570 if (operation == NOP_EXPR)
4572 int formal_id = streamer_read_uhwi (ib);
4573 struct bitpack_d bp = streamer_read_bitpack (ib);
4574 bool agg_preserved = bp_unpack_value (&bp, 1);
4575 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4577 else
4579 tree operand = stream_read_tree (ib, data_in);
4580 int formal_id = streamer_read_uhwi (ib);
4581 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4582 operation);
4584 break;
4585 case IPA_JF_ANCESTOR:
4587 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4588 int formal_id = streamer_read_uhwi (ib);
4589 struct bitpack_d bp = streamer_read_bitpack (ib);
4590 bool agg_preserved = bp_unpack_value (&bp, 1);
4591 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4592 break;
4596 count = streamer_read_uhwi (ib);
4597 vec_alloc (jump_func->agg.items, count);
4598 if (count)
4600 struct bitpack_d bp = streamer_read_bitpack (ib);
4601 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4603 for (i = 0; i < count; i++)
4605 struct ipa_agg_jf_item item;
4606 item.offset = streamer_read_uhwi (ib);
4607 item.value = stream_read_tree (ib, data_in);
4608 jump_func->agg.items->quick_push (item);
4611 struct bitpack_d bp = streamer_read_bitpack (ib);
4612 bool alignment_known = bp_unpack_value (&bp, 1);
4613 if (alignment_known)
4615 jump_func->alignment.known = true;
4616 jump_func->alignment.align = streamer_read_uhwi (ib);
4617 jump_func->alignment.misalign = streamer_read_uhwi (ib);
4619 else
4620 jump_func->alignment.known = false;
4623 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4624 relevant to indirect inlining to OB. */
4626 static void
4627 ipa_write_indirect_edge_info (struct output_block *ob,
4628 struct cgraph_edge *cs)
4630 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4631 struct bitpack_d bp;
4633 streamer_write_hwi (ob, ii->param_index);
4634 bp = bitpack_create (ob->main_stream);
4635 bp_pack_value (&bp, ii->polymorphic, 1);
4636 bp_pack_value (&bp, ii->agg_contents, 1);
4637 bp_pack_value (&bp, ii->member_ptr, 1);
4638 bp_pack_value (&bp, ii->by_ref, 1);
4639 bp_pack_value (&bp, ii->vptr_changed, 1);
4640 streamer_write_bitpack (&bp);
4641 if (ii->agg_contents || ii->polymorphic)
4642 streamer_write_hwi (ob, ii->offset);
4643 else
4644 gcc_assert (ii->offset == 0);
4646 if (ii->polymorphic)
4648 streamer_write_hwi (ob, ii->otr_token);
4649 stream_write_tree (ob, ii->otr_type, true);
4650 ii->context.stream_out (ob);
4654 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4655 relevant to indirect inlining from IB. */
4657 static void
4658 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4659 struct data_in *data_in,
4660 struct cgraph_edge *cs)
4662 struct cgraph_indirect_call_info *ii = cs->indirect_info;
4663 struct bitpack_d bp;
4665 ii->param_index = (int) streamer_read_hwi (ib);
4666 bp = streamer_read_bitpack (ib);
4667 ii->polymorphic = bp_unpack_value (&bp, 1);
4668 ii->agg_contents = bp_unpack_value (&bp, 1);
4669 ii->member_ptr = bp_unpack_value (&bp, 1);
4670 ii->by_ref = bp_unpack_value (&bp, 1);
4671 ii->vptr_changed = bp_unpack_value (&bp, 1);
4672 if (ii->agg_contents || ii->polymorphic)
4673 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4674 else
4675 ii->offset = 0;
4676 if (ii->polymorphic)
4678 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4679 ii->otr_type = stream_read_tree (ib, data_in);
4680 ii->context.stream_in (ib, data_in);
4684 /* Stream out NODE info to OB. */
4686 static void
4687 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4689 int node_ref;
4690 lto_symtab_encoder_t encoder;
4691 struct ipa_node_params *info = IPA_NODE_REF (node);
4692 int j;
4693 struct cgraph_edge *e;
4694 struct bitpack_d bp;
4696 encoder = ob->decl_state->symtab_node_encoder;
4697 node_ref = lto_symtab_encoder_encode (encoder, node);
4698 streamer_write_uhwi (ob, node_ref);
4700 streamer_write_uhwi (ob, ipa_get_param_count (info));
4701 for (j = 0; j < ipa_get_param_count (info); j++)
4702 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4703 bp = bitpack_create (ob->main_stream);
4704 gcc_assert (info->analysis_done
4705 || ipa_get_param_count (info) == 0);
4706 gcc_assert (!info->node_enqueued);
4707 gcc_assert (!info->ipcp_orig_node);
4708 for (j = 0; j < ipa_get_param_count (info); j++)
4709 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4710 streamer_write_bitpack (&bp);
4711 for (j = 0; j < ipa_get_param_count (info); j++)
4712 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4713 for (e = node->callees; e; e = e->next_callee)
4715 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4717 streamer_write_uhwi (ob,
4718 ipa_get_cs_argument_count (args) * 2
4719 + (args->polymorphic_call_contexts != NULL));
4720 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4722 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4723 if (args->polymorphic_call_contexts != NULL)
4724 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4727 for (e = node->indirect_calls; e; e = e->next_callee)
4729 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4731 streamer_write_uhwi (ob,
4732 ipa_get_cs_argument_count (args) * 2
4733 + (args->polymorphic_call_contexts != NULL));
4734 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4736 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4737 if (args->polymorphic_call_contexts != NULL)
4738 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4740 ipa_write_indirect_edge_info (ob, e);
4744 /* Stream in NODE info from IB. */
4746 static void
4747 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4748 struct data_in *data_in)
4750 struct ipa_node_params *info = IPA_NODE_REF (node);
4751 int k;
4752 struct cgraph_edge *e;
4753 struct bitpack_d bp;
4755 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4757 for (k = 0; k < ipa_get_param_count (info); k++)
4758 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4760 bp = streamer_read_bitpack (ib);
4761 if (ipa_get_param_count (info) != 0)
4762 info->analysis_done = true;
4763 info->node_enqueued = false;
4764 for (k = 0; k < ipa_get_param_count (info); k++)
4765 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4766 for (k = 0; k < ipa_get_param_count (info); k++)
4767 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4768 for (e = node->callees; e; e = e->next_callee)
4770 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4771 int count = streamer_read_uhwi (ib);
4772 bool contexts_computed = count & 1;
4773 count /= 2;
4775 if (!count)
4776 continue;
4777 vec_safe_grow_cleared (args->jump_functions, count);
4778 if (contexts_computed)
4779 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4781 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4783 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4784 data_in);
4785 if (contexts_computed)
4786 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4789 for (e = node->indirect_calls; e; e = e->next_callee)
4791 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4792 int count = streamer_read_uhwi (ib);
4793 bool contexts_computed = count & 1;
4794 count /= 2;
4796 if (count)
4798 vec_safe_grow_cleared (args->jump_functions, count);
4799 if (contexts_computed)
4800 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4801 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4803 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4804 data_in);
4805 if (contexts_computed)
4806 ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4809 ipa_read_indirect_edge_info (ib, data_in, e);
4813 /* Write jump functions of all functions in the current LTO partition. */
4815 void
4816 ipa_prop_write_jump_functions (void)
4818 struct cgraph_node *node;
4819 struct output_block *ob;
4820 unsigned int count = 0;
4821 lto_symtab_encoder_iterator lsei;
4822 lto_symtab_encoder_t encoder;
4824 if (!ipa_node_params_sum)
4825 return;
4827 ob = create_output_block (LTO_section_jump_functions);
4828 encoder = ob->decl_state->symtab_node_encoder;
4829 ob->symbol = NULL;
4830 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4831 lsei_next_function_in_partition (&lsei))
4833 node = lsei_cgraph_node (lsei);
4834 if (node->has_gimple_body_p ()
4835 && IPA_NODE_REF (node) != NULL)
4836 count++;
4839 streamer_write_uhwi (ob, count);
4841 /* Process all of the functions. */
4842 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4843 lsei_next_function_in_partition (&lsei))
4845 node = lsei_cgraph_node (lsei);
4846 if (node->has_gimple_body_p ()
4847 && IPA_NODE_REF (node) != NULL)
4848 ipa_write_node_info (ob, node);
4850 streamer_write_char_stream (ob->main_stream, 0);
4851 produce_asm (ob, NULL);
4852 destroy_output_block (ob);
4855 /* Read section in file FILE_DATA of length LEN with data DATA. */
4857 static void
4858 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4859 size_t len)
4861 const struct lto_function_header *header =
4862 (const struct lto_function_header *) data;
4863 const int cfg_offset = sizeof (struct lto_function_header);
4864 const int main_offset = cfg_offset + header->cfg_size;
4865 const int string_offset = main_offset + header->main_size;
4866 struct data_in *data_in;
4867 unsigned int i;
4868 unsigned int count;
4870 lto_input_block ib_main ((const char *) data + main_offset,
4871 header->main_size, file_data->mode_table);
4873 data_in =
4874 lto_data_in_create (file_data, (const char *) data + string_offset,
4875 header->string_size, vNULL);
4876 count = streamer_read_uhwi (&ib_main);
4878 for (i = 0; i < count; i++)
4880 unsigned int index;
4881 struct cgraph_node *node;
4882 lto_symtab_encoder_t encoder;
4884 index = streamer_read_uhwi (&ib_main);
4885 encoder = file_data->symtab_node_encoder;
4886 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4887 index));
4888 gcc_assert (node->definition);
4889 ipa_read_node_info (&ib_main, node, data_in);
4891 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4892 len);
4893 lto_data_in_delete (data_in);
4896 /* Read ipcp jump functions. */
4898 void
4899 ipa_prop_read_jump_functions (void)
4901 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4902 struct lto_file_decl_data *file_data;
4903 unsigned int j = 0;
4905 ipa_check_create_node_params ();
4906 ipa_check_create_edge_args ();
4907 ipa_register_cgraph_hooks ();
4909 while ((file_data = file_data_vec[j++]))
4911 size_t len;
4912 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4914 if (data)
4915 ipa_prop_read_section (file_data, data, len);
4919 /* After merging units, we can get a mismatch in argument counts.
4920 Decl merging might also have rendered parameter lists obsolete.
4921 Also compute called_with_variable_arg info. */
4923 void
4924 ipa_update_after_lto_read (void)
4926 ipa_check_create_node_params ();
4927 ipa_check_create_edge_args ();
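/* Stream out the aggregate value replacement chain and the known
   parameter alignments of NODE to OB.  */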
4930 void
4931 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4933 int node_ref;
4934 unsigned int count = 0;
4935 lto_symtab_encoder_t encoder;
4936 struct ipa_agg_replacement_value *aggvals, *av;
4938 aggvals = ipa_get_agg_replacements_for_node (node);
4939 encoder = ob->decl_state->symtab_node_encoder;
4940 node_ref = lto_symtab_encoder_encode (encoder, node);
4941 streamer_write_uhwi (ob, node_ref);
4943 for (av = aggvals; av; av = av->next)
4944 count++;
4945 streamer_write_uhwi (ob, count);
4947 for (av = aggvals; av; av = av->next)
4949 struct bitpack_d bp;
4951 streamer_write_uhwi (ob, av->offset);
4952 streamer_write_uhwi (ob, av->index);
4953 stream_write_tree (ob, av->value, true);
4955 bp = bitpack_create (ob->main_stream);
4956 bp_pack_value (&bp, av->by_ref, 1);
4957 streamer_write_bitpack (&bp);
4960 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4961 if (ts && vec_safe_length (ts->alignments) > 0)
4963 count = ts->alignments->length ();
4965 streamer_write_uhwi (ob, count);
4966 for (unsigned i = 0; i < count; ++i)
4968 ipa_alignment *parm_al = &(*ts->alignments)[i];
4970 struct bitpack_d bp;
4971 bp = bitpack_create (ob->main_stream);
4972 bp_pack_value (&bp, parm_al->known, 1);
4973 streamer_write_bitpack (&bp);
4974 if (parm_al->known)
4976 streamer_write_uhwi (ob, parm_al->align);
4977 streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
4978 parm_al->misalign);
4982 else
4983 streamer_write_uhwi (ob, 0);
4986 /* Stream in the aggregate value replacement chain for NODE from IB. */
4988 static void
4989 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4990 data_in *data_in)
4992 struct ipa_agg_replacement_value *aggvals = NULL;
4993 unsigned int count, i;
4995 count = streamer_read_uhwi (ib);
4996 for (i = 0; i < count; i++)
4998 struct ipa_agg_replacement_value *av;
4999 struct bitpack_d bp;
5001 av = ggc_alloc<ipa_agg_replacement_value> ();
5002 av->offset = streamer_read_uhwi (ib);
5003 av->index = streamer_read_uhwi (ib);
5004 av->value = stream_read_tree (ib, data_in);
5005 bp = streamer_read_bitpack (ib);
5006 av->by_ref = bp_unpack_value (&bp, 1);
5007 av->next = aggvals;
5008 aggvals = av;
5010 ipa_set_node_agg_value_chain (node, aggvals);
5012 count = streamer_read_uhwi (ib);
5013 if (count > 0)
5015 ipcp_grow_transformations_if_necessary ();
5017 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5018 vec_safe_grow_cleared (ts->alignments, count);
5020 for (i = 0; i < count; i++)
5022 ipa_alignment *parm_al;
5023 parm_al = &(*ts->alignments)[i];
5024 struct bitpack_d bp;
5025 bp = streamer_read_bitpack (ib);
5026 parm_al->known = bp_unpack_value (&bp, 1);
5027 if (parm_al->known)
5029 parm_al->align = streamer_read_uhwi (ib);
5030 parm_al->misalign
5031 = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
5032 0, parm_al->align);
5038 /* Write all aggregate replacements for nodes in the current partition. */
5040 void
5041 ipcp_write_transformation_summaries (void)
5043 struct cgraph_node *node;
5044 struct output_block *ob;
5045 unsigned int count = 0;
5046 lto_symtab_encoder_iterator lsei;
5047 lto_symtab_encoder_t encoder;
5049 ob = create_output_block (LTO_section_ipcp_transform);
5050 encoder = ob->decl_state->symtab_node_encoder;
5051 ob->symbol = NULL;
5052 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5053 lsei_next_function_in_partition (&lsei))
5055 node = lsei_cgraph_node (lsei);
5056 if (node->has_gimple_body_p ())
5057 count++;
5060 streamer_write_uhwi (ob, count);
5062 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5063 lsei_next_function_in_partition (&lsei))
5065 node = lsei_cgraph_node (lsei);
5066 if (node->has_gimple_body_p ())
5067 write_ipcp_transformation_info (ob, node);
5069 streamer_write_char_stream (ob->main_stream, 0);
5070 produce_asm (ob, NULL);
5071 destroy_output_block (ob);
5074 /* Read replacements section in file FILE_DATA of length LEN with data
5075 DATA. */
5077 static void
5078 read_replacements_section (struct lto_file_decl_data *file_data,
5079 const char *data,
5080 size_t len)
5082 const struct lto_function_header *header =
5083 (const struct lto_function_header *) data;
5084 const int cfg_offset = sizeof (struct lto_function_header);
5085 const int main_offset = cfg_offset + header->cfg_size;
5086 const int string_offset = main_offset + header->main_size;
5087 struct data_in *data_in;
5088 unsigned int i;
5089 unsigned int count;
5091 lto_input_block ib_main ((const char *) data + main_offset,
5092 header->main_size, file_data->mode_table);
5094 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5095 header->string_size, vNULL);
5096 count = streamer_read_uhwi (&ib_main);
5098 for (i = 0; i < count; i++)
5100 unsigned int index;
5101 struct cgraph_node *node;
5102 lto_symtab_encoder_t encoder;
5104 index = streamer_read_uhwi (&ib_main);
5105 encoder = file_data->symtab_node_encoder;
5106 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5107 index));
5108 gcc_assert (node->definition);
5109 read_ipcp_transformation_info (&ib_main, node, data_in);
5111 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
5112 len);
5113 lto_data_in_delete (data_in);
5116 /* Read IPA-CP aggregate replacements. */
5118 void
5119 ipcp_read_transformation_summaries (void)
5121 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5122 struct lto_file_decl_data *file_data;
5123 unsigned int j = 0;
5125 while ((file_data = file_data_vec[j++]))
5127 size_t len;
5128 const char *data = lto_get_section_data (file_data,
5129 LTO_section_ipcp_transform,
5130 NULL, &len);
5131 if (data)
5132 read_replacements_section (file_data, data, len);
5136 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5137 NODE. */
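/* E.g. (assumed numbers): with combined_args_to_skip = { 1 },
   replacement indices 0, 2 and 3 are remapped to 0, 1 and 2
   respectively.  */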
5139 static void
5140 adjust_agg_replacement_values (struct cgraph_node *node,
5141 struct ipa_agg_replacement_value *aggval)
5143 struct ipa_agg_replacement_value *v;
5144 int i, c = 0, d = 0, *adj;
5146 if (!node->clone.combined_args_to_skip)
5147 return;
5149 for (v = aggval; v; v = v->next)
5151 gcc_assert (v->index >= 0);
5152 if (c < v->index)
5153 c = v->index;
5155 c++;
5157 adj = XALLOCAVEC (int, c);
5158 for (i = 0; i < c; i++)
5159 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5161 adj[i] = -1;
5162 d++;
5164 else
5165 adj[i] = i - d;
5167 for (v = aggval; v; v = v->next)
5168 v->index = adj[v->index];
5171 /* Dominator walker driving the ipcp modification phase. */
5173 class ipcp_modif_dom_walker : public dom_walker
5175 public:
5176 ipcp_modif_dom_walker (struct func_body_info *fbi,
5177 vec<ipa_param_descriptor> descs,
5178 struct ipa_agg_replacement_value *av,
5179 bool *sc, bool *cc)
5180 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5181 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5183 virtual void before_dom_children (basic_block);
5185 private:
5186 struct func_body_info *m_fbi;
5187 vec<ipa_param_descriptor> m_descriptors;
5188 struct ipa_agg_replacement_value *m_aggval;
5189 bool *m_something_changed, *m_cfg_changed;
5192 void
5193 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5195 gimple_stmt_iterator gsi;
5196 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5198 struct ipa_agg_replacement_value *v;
5199 gimple stmt = gsi_stmt (gsi);
5200 tree rhs, val, t;
5201 HOST_WIDE_INT offset, size;
5202 int index;
5203 bool by_ref, vce;
5205 if (!gimple_assign_load_p (stmt))
5206 continue;
5207 rhs = gimple_assign_rhs1 (stmt);
5208 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5209 continue;
5211 vce = false;
5212 t = rhs;
5213 while (handled_component_p (t))
5215 /* A V_C_E can do things like convert an array of integers to one
5216 bigger integer, and similar things that we do not handle below. */
5217 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5219 vce = true;
5220 break;
5222 t = TREE_OPERAND (t, 0);
5224 if (vce)
5225 continue;
5227 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5228 &offset, &size, &by_ref))
5229 continue;
5230 for (v = m_aggval; v; v = v->next)
5231 if (v->index == index
5232 && v->offset == offset)
5233 break;
5234 if (!v
5235 || v->by_ref != by_ref
5236 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5237 continue;
5239 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5240 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5242 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5243 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5244 else if (TYPE_SIZE (TREE_TYPE (rhs))
5245 == TYPE_SIZE (TREE_TYPE (v->value)))
5246 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5247 else
5249 if (dump_file)
5251 fprintf (dump_file, " const ");
5252 print_generic_expr (dump_file, v->value, 0);
5253 fprintf (dump_file, " can't be converted to type of ");
5254 print_generic_expr (dump_file, rhs, 0);
5255 fprintf (dump_file, "\n");
5257 continue;
5260 else
5261 val = v->value;
5263 if (dump_file && (dump_flags & TDF_DETAILS))
5265 fprintf (dump_file, "Modifying stmt:\n ");
5266 print_gimple_stmt (dump_file, stmt, 0, 0);
5268 gimple_assign_set_rhs_from_tree (&gsi, val);
5269 update_stmt (stmt);
5271 if (dump_file && (dump_flags & TDF_DETAILS))
5273 fprintf (dump_file, "into:\n ");
5274 print_gimple_stmt (dump_file, stmt, 0, 0);
5275 fprintf (dump_file, "\n");
5278 *m_something_changed = true;
5279 if (maybe_clean_eh_stmt (stmt)
5280 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5281 *m_cfg_changed = true;
5286 /* Update alignment of formal parameters as described in
5287 ipcp_transformation_summary. */
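/* E.g. (an assumed case): if IPA-CP proved that a pointer parameter
   always satisfies ptr % 16 == 8 (align 16, misalign 8), that fact is
   recorded on the parameter's SSA default def via
   set_ptr_info_alignment below, unless a stronger alignment is
   already known.  */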
5289 static void
5290 ipcp_update_alignments (struct cgraph_node *node)
5292 tree fndecl = node->decl;
5293 tree parm = DECL_ARGUMENTS (fndecl);
5294 tree next_parm = parm;
5295 ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
5296 if (!ts || vec_safe_length (ts->alignments) == 0)
5297 return;
5298 const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
5299 unsigned count = alignments.length ();
5301 for (unsigned i = 0; i < count; ++i, parm = next_parm)
5303 if (node->clone.combined_args_to_skip
5304 && bitmap_bit_p (node->clone.combined_args_to_skip, i))
5305 continue;
5306 gcc_checking_assert (parm);
5307 next_parm = DECL_CHAIN (parm);
5309 if (!alignments[i].known || !is_gimple_reg (parm))
5310 continue;
5311 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5312 if (!ddef)
5313 continue;
5315 if (dump_file)
5316 fprintf (dump_file, " Adjusting alignment of param %u to %u, "
5317 "misalignment to %u\n", i, alignments[i].align,
5318 alignments[i].misalign);
5320 struct ptr_info_def *pi = get_ptr_info (ddef);
5321 gcc_checking_assert (pi);
5322 unsigned old_align;
5323 unsigned old_misalign;
5324 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5326 if (old_known
5327 && old_align >= alignments[i].align)
5329 if (dump_file)
5330 fprintf (dump_file, " But the alignment was already %u.\n",
5331 old_align);
5332 continue;
5334 set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
5338 /* IPCP transformation phase doing propagation of aggregate values. */
5340 unsigned int
5341 ipcp_transform_function (struct cgraph_node *node)
5343 vec<ipa_param_descriptor> descriptors = vNULL;
5344 struct func_body_info fbi;
5345 struct ipa_agg_replacement_value *aggval;
5346 int param_count;
5347 bool cfg_changed = false, something_changed = false;
5349 gcc_checking_assert (cfun);
5350 gcc_checking_assert (current_function_decl);
5352 if (dump_file)
5353 fprintf (dump_file, "Modification phase of node %s/%i\n",
5354 node->name (), node->order);
5356 ipcp_update_alignments (node);
5357 aggval = ipa_get_agg_replacements_for_node (node);
5358 if (!aggval)
5359 return 0;
5360 param_count = count_formal_params (node->decl);
5361 if (param_count == 0)
5362 return 0;
5363 adjust_agg_replacement_values (node, aggval);
5364 if (dump_file)
5365 ipa_dump_agg_replacement_values (dump_file, aggval);
5367 fbi.node = node;
5368 fbi.info = NULL;
5369 fbi.bb_infos = vNULL;
5370 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5371 fbi.param_count = param_count;
5372 fbi.aa_walked = 0;
5374 descriptors.safe_grow_cleared (param_count);
5375 ipa_populate_param_decls (node, descriptors);
5376 calculate_dominance_info (CDI_DOMINATORS);
5377 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5378 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5380 int i;
5381 struct ipa_bb_info *bi;
5382 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5383 free_ipa_bb_info (bi);
5384 fbi.bb_infos.release ();
5385 free_dominance_info (CDI_DOMINATORS);
5386 (*ipcp_transformations)[node->uid].agg_values = NULL;
5387 (*ipcp_transformations)[node->uid].alignments = NULL;
5388 descriptors.release ();
5390 if (!something_changed)
5391 return 0;
5392 else if (cfg_changed)
5393 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
5394 else
5395 return TODO_update_ssa_only_virtuals;